Timestamp: Jun 13, 2016 7:12:51 AM (8 years ago)
Location:  palm/trunk/SOURCE
Files:     13 edited, 2 moved
Legend: unchanged (and alignment-only) lines are shown plain; added lines are prefixed "+", removed lines "-"; "…" marks collapsed context between hunks.
palm/trunk/SOURCE/Makefile
r1932 → r1933

…
      mod_particle_attributes.f90 netcdf_interface_mod.f90 nudging_mod.f90 \
      package_parin.f90 palm.f90 parin.f90 plant_canopy_model_mod.f90 pmc_interface_mod.f90 \
-     pmc_client_mod.f90 pmc_general_mod.f90 pmc_handle_communicator_mod.f90 \
-     pmc_mpi_wrapper_mod.f90 pmc_server_mod.f90 poisfft_mod.f90 poismg_mod.f90 \
-     poismg_noopt.f90 pres.f90 print_1d.f90 production_e.f90 \
+     pmc_child_mod.f90 pmc_general_mod.f90 pmc_handle_communicator_mod.f90 \
+     pmc_mpi_wrapper_mod.f90 pmc_parent_mod.f90 poisfft_mod.f90 poismg.f90 \
+     poismg_fast_mod.f90 pres.f90 print_1d.f90 production_e.f90 \
      prognostic_equations.f90 progress_bar_mod.f90 radiation_model_mod.f90 \
      random_function_mod.f90 random_gauss.f90 random_generator_parallel_mod.f90 \
…
      radiation_model_mod.o microphysics_mod.o wind_turbine_model_mod.o
  plant_canopy_model_mod.o: modules.o mod_kinds.o
- pmc_interface_mod.o: modules.o mod_kinds.o pmc_client_mod.o pmc_general_mod.o\
-         pmc_handle_communicator_mod.o pmc_mpi_wrapper_mod.o pmc_server_mod.o
- pmc_client_mod.o: mod_kinds.o pmc_general_mod.o pmc_handle_communicator_mod.o\
+ pmc_interface_mod.o: modules.o mod_kinds.o pmc_child_mod.o pmc_general_mod.o\
+         pmc_handle_communicator_mod.o pmc_mpi_wrapper_mod.o pmc_parent_mod.o
+ pmc_child_mod.o: mod_kinds.o pmc_general_mod.o pmc_handle_communicator_mod.o\
          pmc_mpi_wrapper_mod.o
  pmc_general_mod.o: mod_kinds.o
  pmc_handle_communicator_mod.o: modules.o mod_kinds.o pmc_general_mod.o
  pmc_mpi_wrapper_mod.o: pmc_handle_communicator_mod.o
- pmc_server_mod.o: pmc_general_mod.o pmc_handle_communicator_mod.o pmc_mpi_wrapper_mod.o
+ pmc_parent_mod.o: pmc_general_mod.o pmc_handle_communicator_mod.o pmc_mpi_wrapper_mod.o
  poisfft_mod.o: modules.o cpulog_mod.o fft_xy_mod.o mod_kinds.o tridia_solver_mod.o
  poismg_mod.o: modules.o cpulog_mod.o mod_kinds.o
palm/trunk/SOURCE/boundary_conds.f90
r1823 → r1933

…
  ! Current revisions:
  ! -----------------
  !
  !
  ! Former revisions:
  ! -----------------
  ! $Id$
+ !
+ ! 1823 2016-04-07 08:57:52Z hoffmann
+ ! Initial version of purely vertical nesting introduced.
  !
  ! 1822 2016-04-07 07:49:42Z hoffmann
…
      USE pegrid

+     USE pmc_interface,                                                      &
+         ONLY : nesting_mode
+

      IMPLICIT NONE
…
  !
  !-- The same restoration for u at i=nxl and v at j=nys as above must be made
- !-- in case of nest boundaries. Note however, that the above ELSEIF-structure is
- !-- not appropriate here as there may be more than one nest boundary on a
- !-- PE-domain. Furthermore Neumann conditions for SGS-TKE are not required here.
-     IF ( nest_bound_s )  THEN
-        v_p(:,nys,:) = v_p(:,nys-1,:)
-     ENDIF
-     IF ( nest_bound_l )  THEN
-        u_p(:,:,nxl) = u_p(:,:,nxl-1)
+ !-- in case of nest boundaries. This must not be done in case of vertical nesting
+ !-- mode as in that case the lateral boundaries are actually cyclic.
+     IF ( nesting_mode /= 'vertical' )  THEN
+        IF ( nest_bound_s )  THEN
+           v_p(:,nys,:) = v_p(:,nys-1,:)
+        ENDIF
+        IF ( nest_bound_l )  THEN
+           u_p(:,:,nxl) = u_p(:,:,nxl-1)
+        ENDIF
      ENDIF

palm/trunk/SOURCE/exchange_horiz.f90
r1818 → r1933  (alignment-only changes in the hunk shown)

…
            DO k = nzb, nzt+1
               ar(k,nys-nbgp_local+j,i) = ar(k,nyn-nbgp_local+1+j,i)
               ar(k,nyn+1+j,i)          = ar(k,nys+j,i)
            ENDDO
         ENDDO
palm/trunk/SOURCE/exchange_horiz_2d.f90
r1818 → r1933

…
  ! $Id$
  !
+ ! 1818 2016-04-06 15:53:27Z maronga
+ ! Initial version of purely vertical nesting introduced.
+ !
  ! 1804 2016-04-05 16:30:18Z maronga
  ! Removed code for parameter file check (__check)
…
      USE pegrid

+     USE pmc_interface,                                                      &
+         ONLY : nesting_mode
+
+
      IMPLICIT NONE
…
  !
  !-- Neumann-conditions at inflow/outflow/nested boundaries
-     IF ( inflow_l .OR. outflow_l .OR. nest_bound_l )  THEN
-        DO i = nbgp, 1, -1
-           ar(:,nxl-i) = ar(:,nxl)
-        ENDDO
-     ENDIF
-     IF ( inflow_r .OR. outflow_r .OR. nest_bound_r )  THEN
-        DO i = 1, nbgp
-           ar(:,nxr+i) = ar(:,nxr)
-        ENDDO
-     ENDIF
-     IF ( inflow_s .OR. outflow_s .OR. nest_bound_s )  THEN
-        DO i = nbgp, 1, -1
-           ar(nys-i,:) = ar(nys,:)
-        ENDDO
-     ENDIF
-     IF ( inflow_n .OR. outflow_n .OR. nest_bound_n )  THEN
-        DO i = 1, nbgp
-           ar(nyn+i,:) = ar(nyn,:)
-        ENDDO
+     IF ( nesting_mode /= 'vertical' )  THEN
+        IF ( inflow_l .OR. outflow_l .OR. nest_bound_l )  THEN
+           DO i = nbgp, 1, -1
+              ar(:,nxl-i) = ar(:,nxl)
+           ENDDO
+        ENDIF
+        IF ( inflow_r .OR. outflow_r .OR. nest_bound_r )  THEN
+           DO i = 1, nbgp
+              ar(:,nxr+i) = ar(:,nxr)
+           ENDDO
+        ENDIF
+        IF ( inflow_s .OR. outflow_s .OR. nest_bound_s )  THEN
+           DO i = nbgp, 1, -1
+              ar(nys-i,:) = ar(nys,:)
+           ENDDO
+        ENDIF
+        IF ( inflow_n .OR. outflow_n .OR. nest_bound_n )  THEN
+           DO i = 1, nbgp
+              ar(nyn+i,:) = ar(nyn,:)
+           ENDDO
+        ENDIF
      ENDIF
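
The guarded block above is a standard zero-gradient (Neumann) ghost-layer fill. A minimal, self-contained sketch of the same pattern in one dimension follows; the array bounds, values, and names are illustrative and not taken from PALM:

    ! Sketch: boundary values are copied outward into nbgp ghost cells,
    ! which is exactly a discrete zero-gradient condition.
    PROGRAM ghost_fill_sketch
       IMPLICIT NONE
       INTEGER, PARAMETER :: nbgp = 3, nxl = 0, nxr = 9   ! illustrative bounds
       REAL, DIMENSION(nxl-nbgp:nxr+nbgp) :: ar
       INTEGER :: i

       ar = 0.0
       ar(nxl:nxr) = 1.0                  ! interior values

       DO i = nbgp, 1, -1                 ! left boundary
          ar(nxl-i) = ar(nxl)
       ENDDO
       DO i = 1, nbgp                     ! right boundary
          ar(nxr+i) = ar(nxr)
       ENDDO

       PRINT '(16F5.1)', ar
    END PROGRAM ghost_fill_sketch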
palm/trunk/SOURCE/init_pegrid.f90
r1923 → r1933

…
  ! $Id$
  !
+ ! 1923 2016-05-31 16:37:07Z boeske
+ ! Initial version of purely vertical nesting introduced.
+ !
  ! 1922 2016-05-31 16:36:08Z boeske
  ! Bugfix: array transposition checks restricted to cases if a fourier
…
            maximum_parallel_io_streams, message_string,                      &
            mg_switch_to_pe0_level, momentum_advec, nest_bound_l,             &
-           nest_bound_n, nest_bound_r, nest_bound_s, neutral, psolver,       &
-           outflow_l, outflow_n, outflow_r, outflow_s, recycling_width,      &
-           scalar_advec, subdomain_size
+           nest_bound_n, nest_bound_r, nest_bound_s, nest_domain, neutral,   &
+           psolver, outflow_l, outflow_n, outflow_r, outflow_s,              &
+           recycling_width, scalar_advec, subdomain_size

      USE grid_variables,                                                     &
…

      USE pegrid

+     USE pmc_interface,                                                      &
+         ONLY: nesting_mode
+
      USE spectra_mod,                                                        &
          ONLY: calculate_spectra, dt_dosp
…
  #if defined( __parallel )
  !
- !-- Setting of flags for inflow/outflow/nesting conditions in case of non-cyclic
- !-- horizontal boundary conditions.
-     IF ( pleft == MPI_PROC_NULL )  THEN
-        IF ( bc_lr == 'dirichlet/radiation' )  THEN
-           inflow_l  = .TRUE.
-        ELSEIF ( bc_lr == 'radiation/dirichlet' )  THEN
-           outflow_l = .TRUE.
-        ELSEIF ( bc_lr == 'nested' )  THEN
+ !-- Setting of flags for inflow/outflow/nesting conditions.
+     IF ( nesting_mode == 'vertical'  .AND.  nest_domain )  THEN
+        IF ( nxl == 0 )  THEN
            nest_bound_l = .TRUE.
         ENDIF
-     ENDIF
-
-     IF ( pright == MPI_PROC_NULL )  THEN
-        IF ( bc_lr == 'dirichlet/radiation' )  THEN
-           outflow_r = .TRUE.
-        ELSEIF ( bc_lr == 'radiation/dirichlet' )  THEN
-           inflow_r = .TRUE.
-        ELSEIF ( bc_lr == 'nested' )  THEN
+     ELSE
+        IF ( pleft == MPI_PROC_NULL )  THEN
+           IF ( bc_lr == 'dirichlet/radiation' )  THEN
+              inflow_l  = .TRUE.
+           ELSEIF ( bc_lr == 'radiation/dirichlet' )  THEN
+              outflow_l = .TRUE.
+           ELSEIF ( bc_lr == 'nested' )  THEN
+              nest_bound_l = .TRUE.
+           ENDIF
+        ENDIF
+     ENDIF
+
+     IF ( nesting_mode == 'vertical'  .AND.  nest_domain )  THEN
+        IF ( nxr == nx )  THEN
            nest_bound_r = .TRUE.
         ENDIF
-     ENDIF
-
-     IF ( psouth == MPI_PROC_NULL )  THEN
-        IF ( bc_ns == 'dirichlet/radiation' )  THEN
-           outflow_s = .TRUE.
-        ELSEIF ( bc_ns == 'radiation/dirichlet' )  THEN
-           inflow_s = .TRUE.
-        ELSEIF ( bc_ns == 'nested' )  THEN
+     ELSE
+        IF ( pright == MPI_PROC_NULL )  THEN
+           IF ( bc_lr == 'dirichlet/radiation' )  THEN
+              outflow_r = .TRUE.
+           ELSEIF ( bc_lr == 'radiation/dirichlet' )  THEN
+              inflow_r = .TRUE.
+           ELSEIF ( bc_lr == 'nested' )  THEN
+              nest_bound_r = .TRUE.
+           ENDIF
+        ENDIF
+     ENDIF
+
+     IF ( nesting_mode == 'vertical'  .AND.  nest_domain )  THEN
+        IF ( nys == 0 )  THEN
            nest_bound_s = .TRUE.
         ENDIF
-     ENDIF
-
-     IF ( pnorth == MPI_PROC_NULL )  THEN
-        IF ( bc_ns == 'dirichlet/radiation' )  THEN
-           inflow_n  = .TRUE.
-        ELSEIF ( bc_ns == 'radiation/dirichlet' )  THEN
-           outflow_n = .TRUE.
-        ELSEIF ( bc_ns == 'nested' )  THEN
+     ELSE
+        IF ( psouth == MPI_PROC_NULL )  THEN
+           IF ( bc_ns == 'dirichlet/radiation' )  THEN
+              outflow_s = .TRUE.
+           ELSEIF ( bc_ns == 'radiation/dirichlet' )  THEN
+              inflow_s = .TRUE.
+           ELSEIF ( bc_ns == 'nested' )  THEN
+              nest_bound_s = .TRUE.
+           ENDIF
+        ENDIF
+     ENDIF
+
+     IF ( nesting_mode == 'vertical'  .AND.  nest_domain )  THEN
+        IF ( nyn == ny )  THEN
            nest_bound_n = .TRUE.
         ENDIF
-     ENDIF
-
+     ELSE
+        IF ( pnorth == MPI_PROC_NULL )  THEN
+           IF ( bc_ns == 'dirichlet/radiation' )  THEN
+              inflow_n  = .TRUE.
+           ELSEIF ( bc_ns == 'radiation/dirichlet' )  THEN
+              outflow_n = .TRUE.
+           ELSEIF ( bc_ns == 'nested' )  THEN
+              nest_bound_n = .TRUE.
+           ENDIF
+        ENDIF
+     ENDIF
+
  !
  !-- Broadcast the id of the inflow PE
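
For a purely vertical nest the lateral boundaries stay cyclic, so the new flag logic reduces to testing whether a subdomain touches the edge of the (parent-sized) domain. A minimal standalone sketch of that test; all names and values are illustrative, not PALM code:

    ! Sketch: a PE holds a "nest boundary" exactly when its subdomain
    ! index range touches the global domain edge.
    PROGRAM nest_flag_sketch
       IMPLICIT NONE
       INTEGER :: nxl = 0, nxr = 63, nys = 0, nyn = 31   ! this PE's bounds
       INTEGER :: nx  = 63, ny  = 31                     ! global index bounds
       LOGICAL :: bound_l, bound_r, bound_s, bound_n

       bound_l = ( nxl == 0  )
       bound_r = ( nxr == nx )
       bound_s = ( nys == 0  )
       bound_n = ( nyn == ny )

       PRINT '(A,4L2)', 'nest boundaries (l,r,s,n): ',                        &
                        bound_l, bound_r, bound_s, bound_n
    END PROGRAM nest_flag_sketch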
palm/trunk/SOURCE/palm.f90
r1834 → r1933

…
  ! $Id$
  !
+ ! 1834 2016-04-07 14:34:20Z raasch
+ ! Initial version of purely vertical nesting introduced.
+ !
  ! 1833 2016-04-07 14:23:03Z raasch
  ! required user interface version changed
…
          ONLY: constant_diffusion, coupling_char, coupling_mode,             &
                do2d_at_begin, do3d_at_begin, humidity, io_blocks, io_group,  &
-               large_scale_forcing, message_string, nest_domain, nudging,    &
-               passive_scalar, simulated_time, simulated_time_chr,           &
+               large_scale_forcing, message_string, nest_domain, neutral,    &
+               nudging, passive_scalar, simulated_time, simulated_time_chr,  &
                user_interface_current_revision,                              &
                user_interface_required_revision, version, wall_heatflux,     &
…
      USE pmc_interface,                                                      &
-         ONLY: cpl_id, nested_run, pmci_client_initialize, pmci_init,        &
-               pmci_modelconfiguration, pmci_server_initialize
+         ONLY: cpl_id, nested_run, pmci_child_initialize, pmci_init,         &
+               pmci_modelconfiguration, pmci_parent_initialize

      USE statistics,                                                         &
…
         CALL pmci_modelconfiguration
  !
- !--    Receive and interpolate initial data on client.
- !--    Client initialization must be made first if the model is both client and
- !--    server
-        CALL pmci_client_initialize
- !
- !--    Send initial condition data from server to client
-        CALL pmci_server_initialize
+ !--    Receive and interpolate initial data on children.
+ !--    Child initialization must be made first if the model is both child and
+ !--    parent
+        CALL pmci_child_initialize
+ !
+ !--    Send initial condition data from parent to children
+        CALL pmci_parent_initialize
  !
  !--    Exchange_horiz is needed after the nest initialization
…
         CALL exchange_horiz( v, nbgp )
         CALL exchange_horiz( w, nbgp )
-        CALL exchange_horiz( pt, nbgp )
+        IF ( .NOT. neutral )  THEN
+           CALL exchange_horiz( pt, nbgp )
+        ENDIF
         IF ( .NOT. constant_diffusion )  CALL exchange_horiz( e, nbgp )
         IF (humidity .OR. passive_scalar) THEN
palm/trunk/SOURCE/parin.f90
r1917 → r1933

…
  ! Current revisions:
  ! -----------------
  !
  !
  ! Former revisions:
  ! -----------------
  ! $Id$
  !
+ ! 1917 2016-05-27 14:28:12Z witha
+ ! Initial version of purely vertical nesting introduced.
+ !
  ! 1914 2016-05-26 14:44:07Z witha
  ! Added call to wind turbine model for reading of &wind_turbine_par
…
      USE pmc_interface,                                                      &
-         ONLY: nested_run
+         ONLY: nested_run, nesting_mode

      USE profil_parameter,                                                   &
…
  !
- !-- In case of nested runs, explicitly set nesting boundary conditions
- !-- except for the root domain. This will overwrite the user settings.
-     IF ( nest_domain )  THEN
-        bc_lr   = 'nested'
-        bc_ns   = 'nested'
-        bc_uv_t = 'nested'
-        bc_pt_t = 'nested'
-        bc_q_t  = 'nested'
-        bc_p_t  = 'neumann'
+ !-- In case of nested runs, explicitly set nesting boundary conditions.
+ !-- This will overwrite the user settings. bc_lr and bc_ns always need
+ !-- to be cyclic for vertical nesting.
+     IF ( nesting_mode == 'vertical' )  THEN
+        IF (bc_lr /= 'cyclic'  .OR.  bc_ns /= 'cyclic' )  THEN
+           WRITE ( message_string, *)  'bc_lr and bc_ns were set to ,',      &
+                                       'cyclic for vertical nesting'
+           CALL message( 'parin', 'PA0428', 0, 0, 0, 6, 0 )
+           bc_lr = 'cyclic'
+           bc_ns = 'cyclic'
+        ENDIF
+        IF ( nest_domain )  THEN
+           bc_uv_t = 'nested'
+           bc_pt_t = 'nested'
+           bc_q_t  = 'nested'
+           bc_p_t  = 'neumann'
+        ENDIF
+     ELSE
+
+ !
+ !--    For other nesting modes only set boundary conditions for
+ !--    nested domains.
+        IF ( nest_domain )  THEN
+           bc_lr   = 'nested'
+           bc_ns   = 'nested'
+           bc_uv_t = 'nested'
+           bc_pt_t = 'nested'
+           bc_q_t  = 'nested'
+           bc_p_t  = 'neumann'
+        ENDIF
      ENDIF
+
  !
  !-- Check validity of lateral boundary conditions. This has to be done
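
The new parin.f90 logic corrects an invalid user setting and issues a warning instead of aborting. A minimal sketch of this check-and-correct pattern, with PALM's message routine stood in for by a plain PRINT; all names and the message text here are illustrative:

    PROGRAM bc_override_sketch
       IMPLICIT NONE
       CHARACTER(LEN=20) :: bc_lr = 'dirichlet/radiation'   ! invalid for vertical nesting
       CHARACTER(LEN=20) :: bc_ns = 'cyclic'
       CHARACTER(LEN=80) :: message_string

       IF ( TRIM( bc_lr ) /= 'cyclic'  .OR.  TRIM( bc_ns ) /= 'cyclic' )  THEN
          WRITE ( message_string, * )  'bc_lr and bc_ns were set to ',        &
                                       'cyclic for vertical nesting'
          PRINT *, TRIM( message_string )   ! stands in for CALL message( ... )
          bc_lr = 'cyclic'                  ! overwrite the user setting
          bc_ns = 'cyclic'
       ENDIF
    END PROGRAM bc_override_sketch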
palm/trunk/SOURCE/pmc_child_mod.f90
r1932 → r1933

- MODULE pmc_client
+ MODULE pmc_child

- !--------------------------------------------------------------------------------!
+ !-------------------------------------------------------------------------------!
  ! This file is part of PALM.
  !
…
  !
  ! Copyright 1997-2016 Leibniz Universitaet Hannover
- !--------------------------------------------------------------------------------!
+ !-------------------------------------------------------------------------------!
  !
  ! Current revisions:
  ! ------------------
  !
  !
  ! Former revisions:
  ! -----------------
  ! $Id$
+ !
+ ! 1897 2016-05-03 08:10:23Z raasch
+ ! Module renamed. Code clean up. The words server/client changed to parent/child.
  !
  ! 1896 2016-05-03 08:06:41Z raasch
…
  !
  ! 1786 2016-03-08 05:49:27Z raasch
- ! change in client-server data transfer: server now gets data from client
- ! instead that client put's it to the server
+ ! change in child-parent data transfer: parent now gets data from child
+ ! instead of that child puts it to the parent
  !
  ! 1783 2016-03-06 18:36:17Z raasch
…
  ! ------------
  !
- ! Client part of Palm Model Coupler
+ ! Child part of Palm Model Coupler
  !-------------------------------------------------------------------------------!

  #if defined( __parallel )
…
  USE kinds
  USE pmc_general,                                                            &
-     ONLY: arraydef, clientdef, da_desclen, da_namedef, da_namelen, pedef,   &
+     ONLY: arraydef, childdef, da_desclen, da_namedef, da_namelen, pedef,    &
            pmc_da_name_err, pmc_g_setname, pmc_max_array, pmc_status_ok

  USE pmc_handle_communicator,                                                &
-     ONLY: m_model_comm, m_model_npes, m_model_rank, m_to_server_comm
+     ONLY: m_model_comm, m_model_npes, m_model_rank, m_to_parent_comm

  USE pmc_mpi_wrapper,                                                        &
-     ONLY: pmc_alloc_mem, pmc_bcast, pmc_inter_bcast,                        &
-           pmc_recv_from_server, pmc_send_to_server, pmc_time
+     ONLY: pmc_alloc_mem, pmc_bcast, pmc_inter_bcast, pmc_time

  IMPLICIT NONE
…
  SAVE

- TYPE(clientdef) :: me   !<
+ TYPE(childdef) :: me    !<

  INTEGER :: myindex = 0   !< counter and unique number for data arrays
…
- INTERFACE pmc_clientinit
-     MODULE PROCEDURE pmc_clientinit
- END INTERFACE PMC_ClientInit
+ INTERFACE pmc_childinit
+     MODULE PROCEDURE pmc_childinit
+ END INTERFACE pmc_childinit

  INTERFACE pmc_c_clear_next_array_list
…
- PUBLIC pmc_clientinit, pmc_c_clear_next_array_list, pmc_c_getbuffer,        &
+ PUBLIC pmc_childinit, pmc_c_clear_next_array_list, pmc_c_getbuffer,         &
         pmc_c_getnextarray, pmc_c_putbuffer, pmc_c_setind_and_allocmem,      &
         pmc_c_set_dataarray, pmc_set_dataarray_name, pmc_c_get_2d_index_list
…
- SUBROUTINE pmc_clientinit
+ SUBROUTINE pmc_childinit

     IMPLICIT NONE
…
  !-- Get / define the MPI environment
     me%model_comm = m_model_comm
-    me%inter_comm = m_to_server_comm
+    me%inter_comm = m_to_parent_comm

     CALL MPI_COMM_RANK( me%model_comm, me%model_rank, istat )
     CALL MPI_COMM_SIZE( me%model_comm, me%model_npes, istat )
     CALL MPI_COMM_REMOTE_SIZE( me%inter_comm, me%inter_npes, istat )
+
  !
  !-- Intra-communicater is used for MPI_GET
…
  !
- !-- Allocate an array of type arraydef for all server PEs to store information
+ !-- Allocate an array of type arraydef for all parent PEs to store information
  !-- of then transfer array
     DO i = 1, me%inter_npes
…
     ENDDO

- END SUBROUTINE pmc_clientinit
+ END SUBROUTINE pmc_childinit



- SUBROUTINE pmc_set_dataarray_name( serverarraydesc, serverarrayname,        &
-                                    clientarraydesc, clientarrayname, istat )
+ SUBROUTINE pmc_set_dataarray_name( parentarraydesc, parentarrayname,        &
+                                    childarraydesc, childarrayname, istat )

     IMPLICIT NONE

-    CHARACTER(LEN=*), INTENT(IN) :: serverarrayname  !<
-    CHARACTER(LEN=*), INTENT(IN) :: serverarraydesc  !<
-    CHARACTER(LEN=*), INTENT(IN) :: clientarrayname  !<
-    CHARACTER(LEN=*), INTENT(IN) :: clientarraydesc  !<
+    CHARACTER(LEN=*), INTENT(IN) :: parentarrayname  !<
+    CHARACTER(LEN=*), INTENT(IN) :: parentarraydesc  !<
+    CHARACTER(LEN=*), INTENT(IN) :: childarrayname   !<
+    CHARACTER(LEN=*), INTENT(IN) :: childarraydesc   !<

     INTEGER, INTENT(OUT) :: istat  !<
…
     istat = pmc_status_ok
+
  !
  !-- Check length of array names
-    IF ( LEN( TRIM( serverarrayname) ) > da_namelen  .OR.                    &
-         LEN( TRIM( clientarrayname) ) > da_namelen )  THEN
+    IF ( LEN( TRIM( parentarrayname) ) > da_namelen  .OR.                    &
+         LEN( TRIM( childarrayname) ) > da_namelen )  THEN
        istat = pmc_da_name_err
…
     myindex = myindex + 1
     myname%couple_index = myIndex
-    myname%serverdesc   = TRIM( serverarraydesc )
-    myname%nameonserver = TRIM( serverarrayname )
-    myname%clientdesc   = TRIM( clientarraydesc )
-    myname%nameonclient = TRIM( clientarrayname )
+    myname%parentdesc   = TRIM( parentarraydesc )
+    myname%nameonparent = TRIM( parentarrayname )
+    myname%childdesc    = TRIM( childarraydesc )
+    myname%nameonchild  = TRIM( childarrayname )
     ENDIF

  !
- !-- Broadcat to all client PEs
+ !-- Broadcat to all child PEs
  !-- TODO: describe what is broadcast here and why it is done
     CALL pmc_bcast( myname%couple_index, 0, comm=m_model_comm )
-    CALL pmc_bcast( myname%serverdesc,   0, comm=m_model_comm )
-    CALL pmc_bcast( myname%nameonserver, 0, comm=m_model_comm )
-    CALL pmc_bcast( myname%clientdesc,   0, comm=m_model_comm )
-    CALL pmc_bcast( myname%nameonclient, 0, comm=m_model_comm )
+    CALL pmc_bcast( myname%parentdesc,   0, comm=m_model_comm )
+    CALL pmc_bcast( myname%nameonparent, 0, comm=m_model_comm )
+    CALL pmc_bcast( myname%childdesc,    0, comm=m_model_comm )
+    CALL pmc_bcast( myname%nameonchild,  0, comm=m_model_comm )

  !
- !-- Broadcat to all server PEs
+ !-- Broadcat to all parent PEs
  !-- TODO: describe what is broadcast here and why it is done
     IF ( m_model_rank == 0 )  THEN
…
     ENDIF

-    CALL pmc_bcast( myname%couple_index, mype, comm=m_to_server_comm )
-    CALL pmc_bcast( myname%serverdesc,   mype, comm=m_to_server_comm )
-    CALL pmc_bcast( myname%nameonserver, mype, comm=m_to_server_comm )
-    CALL pmc_bcast( myname%clientdesc,   mype, comm=m_to_server_comm )
-    CALL pmc_bcast( myname%nameonclient, mype, comm=m_to_server_comm )
-
-    CALL pmc_g_setname( me, myname%couple_index, myname%nameonclient )
+    CALL pmc_bcast( myname%couple_index, mype, comm=m_to_parent_comm )
+    CALL pmc_bcast( myname%parentdesc,   mype, comm=m_to_parent_comm )
+    CALL pmc_bcast( myname%nameonparent, mype, comm=m_to_parent_comm )
+    CALL pmc_bcast( myname%childdesc,    mype, comm=m_to_parent_comm )
+    CALL pmc_bcast( myname%nameonchild,  mype, comm=m_to_parent_comm )
+
+    CALL pmc_g_setname( me, myname%couple_index, myname%nameonchild )

  END SUBROUTINE pmc_set_dataarray_name
…
     ENDIF

-    CALL pmc_bcast( myname%couple_index, mype, comm=m_to_server_comm )
+    CALL pmc_bcast( myname%couple_index, mype, comm=m_to_parent_comm )

  END SUBROUTINE pmc_set_dataarray_name_lastentry
…
     win_size = C_SIZEOF( dummy )
     CALL MPI_WIN_CREATE( dummy, win_size, iwp, MPI_INFO_NULL, me%intra_comm, &
                          indwin, ierr )
+
  !
- !-- Open window on server side
+ !-- Open window on parent side
  !-- TODO: why is the next MPI routine called twice??
     CALL MPI_WIN_FENCE( 0, indwin, ierr )
+
  !
- !-- Close window on server side and open on client side
+ !-- Close window on parent side and open on child side
     CALL MPI_WIN_FENCE( 0, indwin, ierr )

     DO i = 1, me%inter_npes
        disp = me%model_rank * 2
        CALL MPI_GET( nrele((i-1)*2+1), 2, MPI_INTEGER, i-1, disp, 2,         &
                      MPI_INTEGER, indwin, ierr )
     ENDDO
+
  !
  !-- MPI_GET is non-blocking -> data in nrele is not available until MPI_FENCE is
…
  !
  !-- Local buffer used in MPI_GET can but must not be inside the MPI Window.
- !-- Here, we use a dummy for the MPI window because the server PEs do not access
+ !-- Here, we use a dummy for the MPI window because the parent PEs do not access
  !-- the RMA window via MPI_GET or MPI_PUT
     CALL MPI_WIN_CREATE( dummy, winsize, iwp, MPI_INFO_NULL, me%intra_comm,  &
                          indwin2, ierr )
+
  !
  !-- MPI_GET is non-blocking -> data in nrele is not available until MPI_FENCE is
…
        disp = nrele(2*(i-1)+1)
        CALL MPI_WIN_LOCK( MPI_LOCK_SHARED , i-1, 0, indwin2, ierr )
        CALL MPI_GET( myind, 2*nr, MPI_INTEGER, i-1, disp, 2*nr,              &
                      MPI_INTEGER, indwin2, ierr )
        CALL MPI_WIN_UNLOCK( i-1, indwin2, ierr )
…
  LOGICAL FUNCTION pmc_c_getnextarray( myname )
+
  !
  !-- List handling is still required to get minimal interaction with
…
  !
- !-- Array names are the same on all client PEs, so take first PE to get the name
+ !-- Array names are the same on all child PEs, so take first PE to get the name
     ape => me%pes(1)
+
  !
  !-- Check if all arrays have been processed
…
     IMPLICIT NONE
+
  !
- !-- Naming convention for appendices:  _sc  -> server to client transfer
- !--                                    _cs  -> client to server transfer
- !--                                    recv -> server to client transfer
- !--                                    send -> client to server transfer
+ !-- Naming convention for appendices:  _pc  -> parent to child transfer
+ !--                                    _cp  -> child to parent transfer
+ !--                                    recv -> parent to child transfer
+ !--                                    send -> child to parent transfer
     CHARACTER(LEN=da_namelen) :: myname  !<
…
     INTEGER,DIMENSION(1024) :: req  !<

-    REAL(wp), DIMENSION(:), POINTER, SAVE :: base_array_sc  !< base array
-    REAL(wp), DIMENSION(:), POINTER, SAVE :: base_array_cs  !< base array
+    REAL(wp), DIMENSION(:), POINTER, SAVE :: base_array_pc  !< base array
+    REAL(wp), DIMENSION(:), POINTER, SAVE :: base_array_cp  !< base array

     TYPE(pedef), POINTER :: ape  !<
…
  !
- !-- Server to client direction.
+ !-- Parent to child direction.
  !-- First stride: compute size and set index
     DO i = 1, me%inter_npes
…
        ar => ape%array_list(j)
+
  !
- !--    Receive index from client
+ !--    Receive index from child
        tag = tag + 1
        CALL MPI_RECV( myindex, 1, MPI_INTEGER, i-1, tag, me%inter_comm,      &
                       MPI_STATUS_IGNORE, ierr )
        ar%recvindex = myindex
+
  !
- !--    Determine max, because client buffer is allocated only once
+ !--    Determine max, because child buffer is allocated only once
  !--    TODO: give a more meaningful comment
        IF( ar%nrdims == 3 )  THEN
…
  !-- The buffer for MPI_GET can be PE local, i.e. it can but must not be part of
  !-- the MPI RMA window
-    CALL pmc_alloc_mem( base_array_sc, bufsize, base_ptr )
+    CALL pmc_alloc_mem( base_array_pc, bufsize, base_ptr )
     me%totalbuffersize = bufsize*wp  ! total buffer size in byte
…
  !
- !-- Client to server direction
+ !-- Child to parent direction
     myindex = 1
     rcount  = 0
…
        rcount = rcount + 1
        IF ( ape%nrele > 0 )  THEN
           CALL MPI_ISEND( myindex, 1, MPI_INTEGER, i-1, tag, me%inter_comm,  &
                           req(rcount), ierr )
           ar%sendindex = myindex
        ELSE
           CALL MPI_ISEND( noindex, 1, MPI_INTEGER, i-1, tag, me%inter_comm,  &
                           req(rcount), ierr )
           ar%sendindex = noindex
        ENDIF
+
  !
  !--    Maximum of 1024 outstanding requests
…
  !
- !-- Create RMA (one sided communication) window for data buffer client to server
+ !-- Create RMA (one sided communication) window for data buffer child to parent
  !-- transfer.
  !-- The buffer of MPI_GET (counter part of transfer) can be PE-local, i.e. it
  !-- can but must not be part of the MPI RMA window. Only one RMA window is
  !-- required to prepare the data
- !-- for server -> client transfer on the server side
+ !-- for parent -> child transfer on the parent side
  !-- and
- !-- for client -> server transfer on the client side
-
-    CALL pmc_alloc_mem( base_array_cs, bufsize )
+ !-- for child -> parent transfer on the child side
+    CALL pmc_alloc_mem( base_array_cp, bufsize )
     me%totalbuffersize = bufsize * wp  ! total buffer size in byte

     winSize = me%totalbuffersize

-    CALL MPI_WIN_CREATE( base_array_cs, winsize, wp, MPI_INFO_NULL,          &
-                         me%intra_comm, me%win_server_client, ierr )
-    CALL MPI_WIN_FENCE( 0, me%win_server_client, ierr )
+    CALL MPI_WIN_CREATE( base_array_cp, winsize, wp, MPI_INFO_NULL,          &
+                         me%intra_comm, me%win_parent_child, ierr )
+    CALL MPI_WIN_FENCE( 0, me%win_parent_child, ierr )
     CALL MPI_BARRIER( me%intra_comm, ierr )
…
     IF ( ape%nrele > 0 )  THEN
-       ar%sendbuf = C_LOC( base_array_cs(ar%sendindex) )
+       ar%sendbuf = C_LOC( base_array_cp(ar%sendindex) )
+
+ !
  !--    TODO: if this is an error to be really expected, replace the
  !--    following message by a meaningful standard PALM message using
  !--    the message-routine
        IF ( ar%sendindex+ar%sendsize > bufsize )  THEN
-          WRITE( 0,'(a,i4,4i7,1x,a)') 'Client Buffer too small ', i,         &
+          WRITE( 0,'(a,i4,4i7,1x,a)') 'Child buffer too small ', i,          &
              ar%sendindex, ar%sendsize, ar%sendindex+ar%sendsize,            &
              bufsize, TRIM( ar%name )
           CALL MPI_ABORT( MPI_COMM_WORLD, istat, ierr )
…
     INTEGER :: myindex  !<
     INTEGER :: nr       !< number of elements to get
-                        !< from server
+                        !< from parent
     INTEGER(KIND=MPI_ADDRESS_KIND) :: target_disp
     INTEGER,DIMENSION(1) :: buf_shape
…
  !
- !-- Synchronization of the model is done in pmci_client_synchronize and
- !-- pmci_server_synchronize. Therefor the RMA window can be filled without
+ !-- Synchronization of the model is done in pmci_synchronize.
+ !-- Therefore the RMA window can be filled without
  !-- sychronization at this point and a barrier is not necessary.
  !-- Please note that waittime has to be set in pmc_s_fillbuffer AND
…
        waittime = t2 - t1
     ENDIF
+
  !
- !-- Wait for buffer is filled
+ !-- Wait for buffer is filled.
  !-- TODO: explain in more detail what is happening here. The barrier seems to
- !-- contradict what is said a few lines before r(i.e. that no barrier is necessary)
+ !-- contradict what is said a few lines before (i.e. that no barrier is necessary)
  !-- TODO: In case of PRESENT( waittime ) the barrrier would be calles twice. Why?
  !-- Shouldn't it be done the same way as in pmc_putbuffer?
…
        buf_shape(1) = nr
        CALL C_F_POINTER( ar%recvbuf, buf, buf_shape )
+
  !
  !--    MPI passive target RMA
…
        IF ( nr > 0 )  THEN
           target_disp = ar%recvindex - 1
-          CALL MPI_WIN_LOCK( MPI_LOCK_SHARED , ip-1, 0,                      &
-                             me%win_server_client, ierr )
-          CALL MPI_GET( buf, nr, MPI_REAL, ip-1, target_disp, nr, MPI_REAL,  &
-                        me%win_server_client, ierr )
-          CALL MPI_WIN_UNLOCK( ip-1, me%win_server_client, ierr )
+          CALL MPI_WIN_LOCK( MPI_LOCK_SHARED , ip-1, 0,                      &
+                             me%win_parent_child, ierr )
+          CALL MPI_GET( buf, nr, MPI_REAL, ip-1, target_disp, nr, MPI_REAL,  &
+                        me%win_parent_child, ierr )
+          CALL MPI_WIN_UNLOCK( ip-1, me%win_parent_child, ierr )
        ENDIF
…
        DO ij = 1, ape%nrele
           data_3d(:,ape%locind(ij)%j,ape%locind(ij)%i) =                     &
                                         buf(myindex:myindex+ar%a_dim(1)-1)
           myindex = myindex+ar%a_dim(1)
…
     INTEGER :: myindex  !<
     INTEGER :: nr       !< number of elements to get
-                        !< from server
+                        !< from parent
     INTEGER(KIND=MPI_ADDRESS_KIND) :: target_disp  !<
…
        DO ij = 1, ape%nrele
           buf(myindex:myindex+ar%a_dim(1)-1) =                               &
                          data_3d(:,ape%locind(ij)%j,ape%locind(ij)%i)
           myindex = myindex + ar%a_dim(1)
…
     ENDDO
+
  !
  !-- TODO: Fence might do it, test later
- !-- Call MPI_WIN_FENCE( 0, me%win_server_client, ierr) !
+ !-- Call MPI_WIN_FENCE( 0, me%win_parent_child, ierr) !
  !
  !-- Buffer is filled
…
  #endif
- END MODULE pmc_client
+ END MODULE pmc_child
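
The transfer routines above rely on MPI one-sided communication: one side exposes a buffer in an RMA window and the reader pulls data with MPI_WIN_LOCK / MPI_GET / MPI_WIN_UNLOCK. A self-contained sketch of that pattern, independent of the PMC; the buffer size, datatype, and rank roles are assumptions of the sketch, and it needs at least two MPI ranks:

    PROGRAM rma_get_sketch
       USE mpi
       IMPLICIT NONE
       INTEGER, PARAMETER :: nr = 4
       INTEGER :: ierr, myid, win
       INTEGER(KIND=MPI_ADDRESS_KIND) :: winsize, target_disp
       DOUBLE PRECISION :: base(nr), buf(nr)

       CALL MPI_INIT( ierr )
       CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )

       base = REAL( myid, KIND( base ) )       ! data this rank exposes
       winsize = nr * 8_MPI_ADDRESS_KIND       ! window size in bytes
       CALL MPI_WIN_CREATE( base, winsize, 8, MPI_INFO_NULL,                  &
                            MPI_COMM_WORLD, win, ierr )

       CALL MPI_BARRIER( MPI_COMM_WORLD, ierr )   ! all windows exist now

       IF ( myid == 0 )  THEN                  ! passive-target read from rank 1
          target_disp = 0
          CALL MPI_WIN_LOCK( MPI_LOCK_SHARED, 1, 0, win, ierr )
          CALL MPI_GET( buf, nr, MPI_DOUBLE_PRECISION, 1, target_disp, nr,    &
                        MPI_DOUBLE_PRECISION, win, ierr )
          CALL MPI_WIN_UNLOCK( 1, win, ierr )  ! data is valid after unlock
          PRINT *, 'rank 0 read from rank 1: ', buf
       ENDIF

       CALL MPI_BARRIER( MPI_COMM_WORLD, ierr )
       CALL MPI_WIN_FREE( win, ierr )
       CALL MPI_FINALIZE( ierr )
    END PROGRAM rma_get_sketch

Passive-target access as sketched here lets the reader fetch data without the owner calling any matching routine, which is why the PMC only needs barriers (not fences) around its lock/get/unlock sequences.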
palm/trunk/SOURCE/pmc_general_mod.f90
r1901 → r1933

  MODULE pmc_general

- !--------------------------------------------------------------------------------!
+ !-------------------------------------------------------------------------------!
  ! This file is part of PALM.
  !
…
  !
  ! Copyright 1997-2016 Leibniz Universitaet Hannover
- !--------------------------------------------------------------------------------!
+ !-------------------------------------------------------------------------------!
  !
  ! Current revisions:
  ! ------------------
  !
  !
  ! Former revisions:
  ! -----------------
  ! $Id$
  !
+ ! 1901 2016-05-04 15:39:38Z raasch
+ ! Code clean up. The words server/client changed to parent/child.
+ !
  ! 1900 2016-05-04 15:27:53Z raasch
  ! re-formatted to match PALM style, file renamed again
…
  !
  ! 1786 2016-03-08 05:49:27Z raasch
- ! change in client-server data transfer: server now gets data from client
- ! instead that client put's it to the server
+ ! change in child-parent data transfer: parent now gets data from child
+ ! instead of that child puts it to the parent
  !
  ! 1779 2016-03-03 08:01:28Z raasch
…
     INTEGER                   :: nrdims   !< number of dimensions
     INTEGER, DIMENSION(4)     :: a_dim    !< size of dimensions
-    TYPE(C_PTR)               :: data     !< pointer of data in server space
+    TYPE(C_PTR)               :: data     !< pointer of data in parent space
     TYPE(C_PTR), DIMENSION(2) :: po_data  !< base pointers,
                                           !< pmc_s_set_active_data_array
…
  END TYPE pedef

- TYPE, PUBLIC :: clientdef
+ TYPE, PUBLIC :: childdef
     INTEGER(idp) :: totalbuffersize    !<
     INTEGER      :: model_comm         !< communicator of this model
-    INTEGER      :: inter_comm         !< inter communicator model and client
-    INTEGER      :: intra_comm         !< intra communicator model and client
+    INTEGER      :: inter_comm         !< inter communicator model and child
+    INTEGER      :: intra_comm         !< intra communicator model and child
     INTEGER      :: model_rank         !< rank of this model
     INTEGER      :: model_npes         !< number of PEs this model
-    INTEGER      :: inter_npes         !< number of PEs client model
+    INTEGER      :: inter_npes         !< number of PEs child model
     INTEGER      :: intra_rank         !< rank within intra_comm
-    INTEGER      :: win_server_client  !< MPI RMA for preparing data on server AND client side
-    TYPE(pedef), DIMENSION(:), POINTER :: pes  !< list of all client PEs
- END TYPE clientdef
+    INTEGER      :: win_parent_child   !< MPI RMA for preparing data on parent AND child side
+    TYPE(pedef), DIMENSION(:), POINTER :: pes  !< list of all child PEs
+ END TYPE childdef

  TYPE, PUBLIC :: da_namedef  !< data array name definition
     INTEGER                   :: couple_index  !< unique number of array
-    CHARACTER(LEN=da_desclen) :: serverdesc    !< server array description
-    CHARACTER(LEN=da_namelen) :: nameonserver  !< name of array within server
-    CHARACTER(LEN=da_desclen) :: clientdesc    !< client array description
-    CHARACTER(LEN=da_namelen) :: nameonclient  !< name of array within client
+    CHARACTER(LEN=da_desclen) :: parentdesc    !< parent array description
+    CHARACTER(LEN=da_namelen) :: nameonparent  !< name of array within parent
+    CHARACTER(LEN=da_desclen) :: childdesc     !< child array description
+    CHARACTER(LEN=da_namelen) :: nameonchild   !< name of array within child
  END TYPE da_namedef
…
  CONTAINS

- SUBROUTINE pmc_g_setname( myclient, couple_index, aname )
+ SUBROUTINE pmc_g_setname( mychild, couple_index, aname )

     IMPLICIT NONE
…
     CHARACTER(LEN=*)               :: aname         !<
     INTEGER, INTENT(IN)            :: couple_index  !<
-    TYPE(clientdef), INTENT(INOUT) :: myclient      !<
+    TYPE(childdef), INTENT(INOUT)  :: mychild       !<

     INTEGER :: i  !<
…
  !-- Assign array to next free index in array list.
  !-- Set name of array in arraydef structure
-    DO i = 1, myclient%inter_npes
-
-       ape => myclient%pes(i)
+    DO i = 1, mychild%inter_npes
+
+       ape => mychild%pes(i)
        ape%nr_arrays = ape%nr_arrays + 1
        ape%array_list(ape%nr_arrays)%name = aname
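
The renamed types above implement per-PE bookkeeping: a childdef holds one pedef per remote PE, and each pedef carries a list of named arrays that pmc_g_setname extends on every remote PE at once. A trimmed-down sketch of that structure; fields and sizes are reduced and the names carry a _t suffix to make clear these are not the PALM definitions:

    MODULE bookkeeping_sketch
       IMPLICIT NONE
       INTEGER, PARAMETER :: da_namelen = 16

       TYPE arraydef_t
          CHARACTER(LEN=da_namelen) :: name = ''
       END TYPE arraydef_t

       TYPE pedef_t
          INTEGER :: nr_arrays = 0
          TYPE(arraydef_t), DIMENSION(:), ALLOCATABLE :: array_list
       END TYPE pedef_t

       TYPE childdef_t
          INTEGER :: inter_npes = 0
          TYPE(pedef_t), DIMENSION(:), ALLOCATABLE :: pes
       END TYPE childdef_t

    CONTAINS

       SUBROUTINE setname( mychild, aname )
          ! Register the same array name on every remote PE's list
          ! (assumes pes and array_list were allocated by the caller).
          TYPE(childdef_t), INTENT(INOUT) :: mychild
          CHARACTER(LEN=*), INTENT(IN)    :: aname
          INTEGER :: i
          DO i = 1, mychild%inter_npes
             mychild%pes(i)%nr_arrays = mychild%pes(i)%nr_arrays + 1
             mychild%pes(i)%array_list(mychild%pes(i)%nr_arrays)%name = aname
          ENDDO
       END SUBROUTINE setname

    END MODULE bookkeeping_sketch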
palm/trunk/SOURCE/pmc_handle_communicator_mod.f90
r1925 → r1933

  MODULE PMC_handle_communicator

- !--------------------------------------------------------------------------------!
+ !-------------------------------------------------------------------------------!
  ! This file is part of PALM.
  !
…
  !
  ! Copyright 1997-2016 Leibniz Universitaet Hannover
- !--------------------------------------------------------------------------------!
+ !-------------------------------------------------------------------------------!
  !
  ! Current revisions:
  ! ------------------
  !
  !
  ! Former revisions:
  ! -----------------
  ! $Id$
+ !
+ ! 1901 2016-05-04 15:39:38Z raasch
+ ! Initial version of purely vertical nesting introduced.
+ ! Code clean up. The words server/client changed to parent/child.
  !
  ! 1900 2016-05-04 15:27:53Z raasch
…
  ! ------------
  ! Handle MPI communicator in PALM model coupler
- !------------------------------------------------------------------------------!
+ !-------------------------------------------------------------------------------!

  #if defined( __parallel )
…
  #endif

  USE pmc_general,                                                            &
      ONLY: pmc_status_ok, pmc_status_error, pmc_max_models
+ USE control_parameters,                                                     &
+     ONLY: message_string

  IMPLICIT NONE
…
  INTEGER, PUBLIC :: m_model_comm      !< communicator of this model
- INTEGER, PUBLIC :: m_to_server_comm  !< communicator to the server
+ INTEGER, PUBLIC :: m_to_parent_comm  !< communicator to the parent
  INTEGER, PUBLIC :: m_world_rank      !<
  INTEGER         :: m_world_npes      !<
  INTEGER, PUBLIC :: m_model_rank      !<
  INTEGER, PUBLIC :: m_model_npes      !<
- INTEGER         :: m_server_remote_size  !< number of server PEs
+ INTEGER         :: m_parent_remote_size  !< number of parent PEs

- INTEGER, DIMENSION(pmc_max_models), PUBLIC :: m_to_client_comm       !< communicator to the client(s)
- INTEGER, DIMENSION(:), POINTER, PUBLIC     :: pmc_server_for_client  !<
+ INTEGER, DIMENSION(pmc_max_models), PUBLIC :: m_to_child_comm        !< communicator to the child(ren)
+ INTEGER, DIMENSION(:), POINTER, PUBLIC     :: pmc_parent_for_child   !<
…
  CONTAINS

  SUBROUTINE pmc_init_model( comm, nesting_datatransfer_mode, nesting_mode,   &
                             pmc_status )

     USE control_parameters,                                                  &
         ONLY: message_string

     USE pegrid,                                                              &
         ONLY: myid

     IMPLICIT NONE

-    CHARACTER(LEN=7), INTENT(OUT) :: nesting_mode               !<
+    CHARACTER(LEN=8), INTENT(OUT) :: nesting_mode               !<
     CHARACTER(LEN=7), INTENT(OUT) :: nesting_datatransfer_mode  !<
…
     INTEGER, INTENT(OUT) :: pmc_status  !<

-    INTEGER :: clientcount  !<
+    INTEGER :: childcount   !<
     INTEGER :: i            !<
…
     INTEGER :: tag  !<

-    INTEGER, DIMENSION(pmc_max_models)   :: activeserver  ! I am active server for this client ID
+    INTEGER, DIMENSION(pmc_max_models)   :: activeparent  ! I am active parent for this child ID
     INTEGER, DIMENSION(pmc_max_models+1) :: start_pe
…
     m_world_comm = MPI_COMM_WORLD
     m_my_cpl_id  = -1
-    clientcount  = 0
-    activeserver = -1
+    childcount   = 0
+    activeparent = -1
     start_pe(:)  = 0
…
     IF ( m_world_rank == 0 )  THEN

        CALL read_coupling_layout( nesting_datatransfer_mode, nesting_mode,   &
                                   pmc_status )

        IF ( pmc_status /= pmc_no_namelist_found  .AND.                       &
             pmc_status /= pmc_namelist_error )                               &
        THEN
…
  !--       total sum of cores required by all nest domains
           IF ( start_pe(m_ncpl+1) /= m_world_npes )  THEN
-             WRITE ( message_string, '(A,I6,A,I6,A)' )                       &
-                             'nesting-setup requires more MPI procs (',      &
-                             start_pe(m_ncpl+1), ') than provided (',        &
-                             m_world_npes,')'
+             WRITE ( message_string, '(A,I6,A,I6,A)' )                       &
+                             'nesting-setup requires different number of ',  &
+                             'MPI procs (', start_pe(m_ncpl+1), ') than ',   &
+                             'provided (', m_world_npes,')'
              CALL message( 'pmc_init_model', 'PA0229', 3, 2, 0, 6, 0 )
           ENDIF
…
  !-- Broadcast coupling layout
     DO i = 1, m_ncpl
        CALL MPI_BCAST( m_couplers(i)%name, LEN( m_couplers(i)%name ),        &
                        MPI_CHARACTER, 0, MPI_COMM_WORLD, istat )
        CALL MPI_BCAST( m_couplers(i)%id,           1, MPI_INTEGER, 0,        &
                        MPI_COMM_WORLD, istat )
        CALL MPI_BCAST( m_couplers(i)%Parent_id,    1, MPI_INTEGER, 0,        &
                        MPI_COMM_WORLD, istat )
        CALL MPI_BCAST( m_couplers(i)%npe_total,    1, MPI_INTEGER, 0,        &
                        MPI_COMM_WORLD, istat )
        CALL MPI_BCAST( m_couplers(i)%lower_left_x, 1, MPI_REAL,    0,        &
                        MPI_COMM_WORLD, istat )
        CALL MPI_BCAST( m_couplers(i)%lower_left_y, 1, MPI_REAL,    0,        &
                        MPI_COMM_WORLD, istat )
     ENDDO
     CALL MPI_BCAST( nesting_mode, LEN( nesting_mode ), MPI_CHARACTER, 0,     &
                     MPI_COMM_WORLD, istat )
     CALL MPI_BCAST( nesting_datatransfer_mode, LEN(nesting_datatransfer_mode),&
                     MPI_CHARACTER, 0, MPI_COMM_WORLD, istat )
…
  !-- Assign global MPI processes to individual models by setting the couple id
     DO i = 1, m_ncpl
        IF ( m_world_rank >= start_pe(i)  .AND.  m_world_rank < start_pe(i+1) )&
        THEN
           m_my_cpl_id = i
…
  !-- The communictors for the individual models as created by MPI_COMM_SPLIT.
  !-- The color of the model is represented by the coupler id
     CALL MPI_COMM_SPLIT( MPI_COMM_WORLD, m_my_cpl_id, m_my_cpl_rank, comm,   &
                          istat )
…
  !-- Broadcast (from PE 0) the parent id and id of every model
     DO i = 1, m_ncpl
        CALL MPI_BCAST( m_couplers(i)%parent_id, 1, MPI_INTEGER, 0,           &
                        MPI_COMM_WORLD, istat )
        CALL MPI_BCAST( m_couplers(i)%id,        1, MPI_INTEGER, 0,           &
                        MPI_COMM_WORLD, istat )
     ENDDO
…
  !
- !-- Create intercommunicator between server and clients.
+ !-- Create intercommunicator between parent and children.
  !-- MPI_INTERCOMM_CREATE creates an intercommunicator between 2 groups of
  !-- different colors.
…
        IF ( m_couplers(i)%parent_id == m_my_cpl_id )  THEN
  !
- !--       Collect server PEs.
+ !--       Collect parent PEs.
  !--       Every model exept the root model has a parent model which acts as
- !--       server model. Create an intercommunicator to connect current PE to
- !--       all client PEs
+ !--       parent model. Create an intercommunicator to connect current PE to
+ !--       all children PEs
           tag = 500 + i
           CALL MPI_INTERCOMM_CREATE( comm, 0, MPI_COMM_WORLD, start_pe(i),   &
-                                     tag, m_to_client_comm(i), istat)
-          clientcount = clientcount + 1
-          activeserver(i) = 1
+                                     tag, m_to_child_comm(i), istat)
+          childcount = childcount + 1
+          activeparent(i) = 1

        ELSEIF ( i == m_my_cpl_id)  THEN
  !
- !--       Collect client PEs.
- !--       Every model exept the root model has a paremt model which acts as
- !--       server model. Create an intercommunicator to connect current PE to
- !--       all server PEs
+ !--       Collect children PEs.
+ !--       Every model except the root model has a parent model.
+ !--       Create an intercommunicator to connect current PE to all parent PEs
           tag = 500 + i
           CALL MPI_INTERCOMM_CREATE( comm, 0, MPI_COMM_WORLD,                &
                                      start_pe(m_couplers(i)%parent_id),      &
-                                     tag, m_to_server_comm, istat )
+                                     tag, m_to_parent_comm, istat )
        ENDIF
…
  !
- !-- If I am server, count the number of clients that I have
- !-- Although this loop is symmetric on all processes, the "activeserver" flag
+ !-- If I am parent, count the number of children that I have
+ !-- Although this loop is symmetric on all processes, the "activeparent" flag
  !-- is true (==1) on the respective individual PE only.
-    ALLOCATE( pmc_server_for_client(clientcount+1) )
-
-    clientcount = 0
+    ALLOCATE( pmc_parent_for_child(childcount+1) )
+
+    childcount = 0
     DO i = 2, m_ncpl
-       IF ( activeserver(i) == 1 )  THEN
-          clientcount = clientcount + 1
-          pmc_server_for_client(clientcount) = i
+       IF ( activeparent(i) == 1 )  THEN
+          childcount = childcount + 1
+          pmc_parent_for_child(childcount) = i
       ENDIF
     ENDDO
  !
- !-- Get the size of the server model
+ !-- Get the size of the parent model
     IF ( m_my_cpl_id > 1 )  THEN
-       CALL MPI_COMM_REMOTE_SIZE( m_to_server_comm, m_server_remote_size,    &
+       CALL MPI_COMM_REMOTE_SIZE( m_to_parent_comm, m_parent_remote_size,    &
                                   istat)
     ELSE
  !
- !--    The root model does not have a server
-       m_server_remote_size = -1
+ !--    The root model does not have a parent
+       m_parent_remote_size = -1
     ENDIF
  !
…
  SUBROUTINE pmc_get_model_info( comm_world_nesting, cpl_id, cpl_name,        &
                                 cpl_parent_id, lower_left_x, lower_left_y,   &
                                 ncpl, npe_total, request_for_cpl_id )
  !
…
     IMPLICIT NONE

     CHARACTER(LEN=*), INTENT(OUT), OPTIONAL :: cpl_name            !<

     INTEGER, INTENT(IN), OPTIONAL           :: request_for_cpl_id  !<

     INTEGER, INTENT(OUT), OPTIONAL :: comm_world_nesting  !<
…
  SUBROUTINE read_coupling_layout( nesting_datatransfer_mode, nesting_mode,   &
                                   pmc_status )

     IMPLICIT NONE

-    CHARACTER(LEN=7), INTENT(INOUT) :: nesting_mode
+    CHARACTER(LEN=8), INTENT(INOUT) :: nesting_mode
     CHARACTER(LEN=7), INTENT(INOUT) :: nesting_datatransfer_mode

-    INTEGER, INTENT(INOUT) :: pmc_status
-    INTEGER                :: i, istat
+    INTEGER(iwp), INTENT(INOUT) :: pmc_status
+    INTEGER(iwp)                :: bad_llcorner
+    INTEGER(iwp)                :: i
+    INTEGER(iwp)                :: istat

     TYPE(pmc_layout), DIMENSION(pmc_max_models) :: domain_layouts
…
     IF ( istat < 0 )  THEN
+
  !
  !--    No nestpar-NAMELIST found
        pmc_status = pmc_no_namelist_found
+
  !
  !--    Set filepointer to the beginning of the file. Otherwise PE0 will later
…
     ELSEIF ( istat > 0 )  THEN
+
  !
  !--    Errors in reading nestpar-NAMELIST
…
  !-- Output location message
     CALL location_message( 'initialize communicators for nesting', .FALSE. )
+
  !
  !-- Assign the layout to the internally used variable
…
  !
  !--    When id=-1 is found for the first time, the list of domains is finished
        IF ( m_couplers(i)%id == -1  .OR.  i == pmc_max_models )  THEN
           IF ( m_couplers(i)%id == -1 )  THEN
              m_ncpl = i - 1
…
     ENDDO

+ !
+ !-- Make sure that all domains have equal lower left corner in case of vertical
+ !-- nesting
+    IF ( nesting_mode == 'vertical' )  THEN
+       bad_llcorner = 0
+       DO i = 1, m_ncpl
+          IF ( domain_layouts(i)%lower_left_x /= 0.0_wp  .OR.                &
+               domain_layouts(i)%lower_left_y /= 0.0_wp )  THEN
+             bad_llcorner = bad_llcorner + 1
+             domain_layouts(i)%lower_left_x = 0.0_wp
+             domain_layouts(i)%lower_left_y = 0.0_wp
+          ENDIF
+       ENDDO
+       IF ( bad_llcorner /= 0)  THEN
+          WRITE ( message_string, *)  'Lower left corners do not match,',    &
+                                      'they were set to (0, 0)'
+          CALL message( 'read_coupling_layout', 'PA0427', 0, 0, 0, 6, 0 )
+       ENDIF
+    ENDIF

  END SUBROUTINE read_coupling_layout
palm/trunk/SOURCE/pmc_interface_mod.f90
r1928 r1933 1 1 MODULE pmc_interface 2 2 3 !------------------------------------------------------------------------------ !3 !-------------------------------------------------------------------------------! 4 4 ! This file is part of PALM. 5 5 ! … … 16 16 ! 17 17 ! Copyright 1997-2016 Leibniz Universitaet Hannover 18 !------------------------------------------------------------------------------ !18 !-------------------------------------------------------------------------------! 19 19 ! 20 20 ! Current revisions: … … 26 26 ! $Id$ 27 27 ! 28 ! 1927 2016-06-07 11:56:53Z hellstea 29 ! Error check for overlapping parallel nests added 28 ! 1901 2016-05-04 15:39:38Z raasch 29 ! Initial version of purely vertical nesting introduced. 30 ! Code clean up. The words server/client changed to parent/child. 30 31 ! 31 32 ! 1900 2016-05-04 15:27:53Z raasch … … 62 63 ! introduction of different datatransfer modes, 63 64 ! further formatting cleanup, parameter checks added (including mismatches 64 ! between root and client model settings),65 ! between root and nest model settings), 65 66 ! +routine pmci_check_setting_mismatches 66 67 ! comm_world_nesting introduced … … 106 107 ! Domain nesting interface routines. The low-level inter-domain communication 107 108 ! is conducted by the PMC-library routines. 108 !------------------------------------------------------------------------------ !109 !-------------------------------------------------------------------------------! 109 110 110 111 #if defined( __nopointer ) 111 USE arrays_3d, &112 ONLY: dzu, dzw, e, e_p, pt, pt_p, q, q_p, u, u_p, v, v_p, w, w_p, zu, &112 USE arrays_3d, & 113 ONLY: dzu, dzw, e, e_p, pt, pt_p, q, q_p, u, u_p, v, v_p, w, w_p, zu, & 113 114 zw, z0 114 115 #else 115 USE arrays_3d, &116 ONLY: dzu, dzw, e, e_p, e_1, e_2, pt, pt_p, pt_1, pt_2, q, q_p, q_1, &117 q_2, u, u_p, u_1, u_2, v, v_p, v_1, v_2, w, w_p, w_1, w_2, zu, &116 USE arrays_3d, & 117 ONLY: dzu, dzw, e, e_p, e_1, e_2, pt, pt_p, pt_1, pt_2, q, q_p, q_1, & 118 q_2, u, u_p, u_1, u_2, v, v_p, v_1, v_2, w, w_p, w_1, w_2, zu, & 118 119 zw, z0 119 120 #endif 120 121 121 USE control_parameters, &122 ONLY: coupling_char, dt_3d, dz, humidity, message_string, &123 nest_bound_l, nest_bound_r, nest_bound_s, nest_bound_n, &124 nest_domain, neutral, passive_scalar, simulated_time, &122 USE control_parameters, & 123 ONLY: coupling_char, dt_3d, dz, humidity, message_string, & 124 nest_bound_l, nest_bound_r, nest_bound_s, nest_bound_n, & 125 nest_domain, neutral, passive_scalar, simulated_time, & 125 126 topography, volume_flow 126 127 127 USE cpulog, &128 USE cpulog, & 128 129 ONLY: cpu_log, log_point_s 129 130 130 USE grid_variables, &131 USE grid_variables, & 131 132 ONLY: dx, dy 132 133 133 USE indices, &134 ONLY: nbgp, nx, nxl, nxlg, nxlu, nxr, nxrg, ny, nyn, nyng, nys, nysg, &135 nysv, nz, nzb, nzb_s_inner, nzb_u_inner, nzb_u_outer, &134 USE indices, & 135 ONLY: nbgp, nx, nxl, nxlg, nxlu, nxr, nxrg, ny, nyn, nyng, nys, nysg, & 136 nysv, nz, nzb, nzb_s_inner, nzb_u_inner, nzb_u_outer, & 136 137 nzb_v_inner, nzb_v_outer, nzb_w_inner, nzb_w_outer, nzt 137 138 … … 145 146 #endif 146 147 147 USE pegrid, &148 ONLY: collective_wait, comm1dx, comm1dy, comm2d, myid, myidx, myidy, &148 USE pegrid, & 149 ONLY: collective_wait, comm1dx, comm1dy, comm2d, myid, myidx, myidy, & 149 150 numprocs 150 151 151 USE pmc_c lient,&152 ONLY: pmc_c lientinit, pmc_c_clear_next_array_list,&153 pmc_c_getnextarray, pmc_c_get_2d_index_list, pmc_c_getbuffer, &154 pmc_c_putbuffer, pmc_c_setind_and_allocmem, &152 USE 
pmc_child, & 153 ONLY: pmc_childinit, pmc_c_clear_next_array_list, & 154 pmc_c_getnextarray, pmc_c_get_2d_index_list, pmc_c_getbuffer, & 155 pmc_c_putbuffer, pmc_c_setind_and_allocmem, & 155 156 pmc_c_set_dataarray, pmc_set_dataarray_name 156 157 157 USE pmc_general, &158 USE pmc_general, & 158 159 ONLY: da_namelen 159 160 160 USE pmc_handle_communicator, &161 ONLY: pmc_get_model_info, pmc_init_model, pmc_is_rootmodel, &162 pmc_no_namelist_found, pmc_ server_for_client163 164 USE pmc_mpi_wrapper, &165 ONLY: pmc_bcast, pmc_recv_from_c lient, pmc_recv_from_server,&166 pmc_send_to_c lient, pmc_send_to_server167 168 USE pmc_ server,&169 ONLY: pmc_ serverinit, pmc_s_clear_next_array_list, pmc_s_fillbuffer,&170 pmc_s_getdata_from_buffer, pmc_s_getnextarray, &171 pmc_s_setind_and_allocmem, pmc_s_set_active_data_array, &161 USE pmc_handle_communicator, & 162 ONLY: pmc_get_model_info, pmc_init_model, pmc_is_rootmodel, & 163 pmc_no_namelist_found, pmc_parent_for_child 164 165 USE pmc_mpi_wrapper, & 166 ONLY: pmc_bcast, pmc_recv_from_child, pmc_recv_from_parent, & 167 pmc_send_to_child, pmc_send_to_parent 168 169 USE pmc_parent, & 170 ONLY: pmc_parentinit, pmc_s_clear_next_array_list, pmc_s_fillbuffer, & 171 pmc_s_getdata_from_buffer, pmc_s_getnextarray, & 172 pmc_s_setind_and_allocmem, pmc_s_set_active_data_array, & 172 173 pmc_s_set_dataarray, pmc_s_set_2d_index_list 173 174 … … 180 181 ! 181 182 !-- Constants 182 INTEGER(iwp), PARAMETER :: c lient_to_server= 2 !:183 INTEGER(iwp), PARAMETER :: server_to_client= 1 !:183 INTEGER(iwp), PARAMETER :: child_to_parent = 2 !: 184 INTEGER(iwp), PARAMETER :: parent_to_child = 1 !: 184 185 185 186 ! … … 196 197 !: parameter for data- 197 198 !: transfer mode 198 CHARACTER(LEN= 7), SAVE :: nesting_mode = 'two-way' !: steering parameter199 CHARACTER(LEN=8), SAVE :: nesting_mode = 'two-way' !: steering parameter 199 200 !: for 1- or 2-way nesting 200 201 … … 216 217 217 218 ! 218 !-- C lientcoarse data arrays219 !-- Child coarse data arrays 219 220 INTEGER(iwp), DIMENSION(5) :: coarse_bound !: 220 221 … … 237 238 238 239 ! 239 !-- C lient interpolation coefficients and client-array indices to be precomputed240 !-- and stored.240 !-- Child interpolation coefficients and child-array indices to be 241 !-- precomputed and stored. 241 242 INTEGER(iwp), SAVE, ALLOCATABLE, DIMENSION(:) :: ico !: 242 243 INTEGER(iwp), SAVE, ALLOCATABLE, DIMENSION(:) :: icu !: … … 259 260 260 261 ! 261 !-- C lientindex arrays and log-ratio arrays for the log-law near-wall262 !-- Child index arrays and log-ratio arrays for the log-law near-wall 262 263 !-- corrections. These are not truly 3-D arrays but multiple 2-D arrays. 263 264 INTEGER(iwp), SAVE :: ncorr !: 4th dimension of the log_ratio-arrays … … 313 314 314 315 ! 315 !-- C lient-array indices to be precomputed and stored for anterpolation.316 !-- Child-array indices to be precomputed and stored for anterpolation. 
316 317 INTEGER(iwp), SAVE, ALLOCATABLE, DIMENSION(:) :: iflu !: 317 318 INTEGER(iwp), SAVE, ALLOCATABLE, DIMENSION(:) :: ifuu !: … … 363 364 END INTERFACE 364 365 365 INTERFACE pmci_c lient_initialize366 MODULE PROCEDURE pmci_c lient_initialize366 INTERFACE pmci_child_initialize 367 MODULE PROCEDURE pmci_child_initialize 367 368 END INTERFACE 368 369 … … 387 388 END INTERFACE 388 389 389 INTERFACE pmci_ server_initialize390 MODULE PROCEDURE pmci_ server_initialize390 INTERFACE pmci_parent_initialize 391 MODULE PROCEDURE pmci_parent_initialize 391 392 END INTERFACE 392 393 … … 395 396 END INTERFACE pmci_set_swaplevel 396 397 397 PUBLIC anterp_relax_length_l, anterp_relax_length_r, &398 anterp_relax_length_s, anterp_relax_length_n, &399 anterp_relax_length_t, c lient_to_server, comm_world_nesting,&400 cpl_id, nested_run, nesting_datatransfer_mode, nesting_mode, &401 server_to_client402 PUBLIC pmci_c lient_initialize398 PUBLIC anterp_relax_length_l, anterp_relax_length_r, & 399 anterp_relax_length_s, anterp_relax_length_n, & 400 anterp_relax_length_t, child_to_parent, comm_world_nesting, & 401 cpl_id, nested_run, nesting_datatransfer_mode, nesting_mode, & 402 parent_to_child 403 PUBLIC pmci_child_initialize 403 404 PUBLIC pmci_datatrans 404 405 PUBLIC pmci_ensure_nest_mass_conservation 405 406 PUBLIC pmci_init 406 407 PUBLIC pmci_modelconfiguration 407 PUBLIC pmci_ server_initialize408 PUBLIC pmci_parent_initialize 408 409 PUBLIC pmci_synchronize 409 410 PUBLIC pmci_set_swaplevel … … 415 416 SUBROUTINE pmci_init( world_comm ) 416 417 417 USE control_parameters, &418 USE control_parameters, & 418 419 ONLY: message_string 419 420 … … 429 430 430 431 431 CALL pmc_init_model( world_comm, nesting_datatransfer_mode, nesting_mode, &432 CALL pmc_init_model( world_comm, nesting_datatransfer_mode, nesting_mode, & 432 433 pmc_status ) 433 434 … … 445 446 ! 446 447 !-- Check steering parameter values 447 IF ( TRIM( nesting_mode ) /= 'one-way' .AND. & 448 TRIM( nesting_mode ) /= 'two-way' ) & 448 IF ( TRIM( nesting_mode ) /= 'one-way' .AND. & 449 TRIM( nesting_mode ) /= 'two-way' .AND. & 450 TRIM( nesting_mode ) /= 'vertical' ) & 449 451 THEN 450 452 message_string = 'illegal nesting mode: ' // TRIM( nesting_mode ) … … 452 454 ENDIF 453 455 454 IF ( TRIM( nesting_datatransfer_mode ) /= 'cascade' .AND. &455 TRIM( nesting_datatransfer_mode ) /= 'mixed' .AND. &456 TRIM( nesting_datatransfer_mode ) /= 'overlap' ) &456 IF ( TRIM( nesting_datatransfer_mode ) /= 'cascade' .AND. & 457 TRIM( nesting_datatransfer_mode ) /= 'mixed' .AND. & 458 TRIM( nesting_datatransfer_mode ) /= 'overlap' ) & 457 459 THEN 458 message_string = 'illegal nesting datatransfer mode: ' &460 message_string = 'illegal nesting datatransfer mode: ' & 459 461 // TRIM( nesting_datatransfer_mode ) 460 462 CALL message( 'pmci_init', 'PA0418', 3, 2, 0, 6, 0 ) … … 468 470 !-- Get some variables required by the pmc-interface (and in some cases in the 469 471 !-- PALM code out of the pmci) out of the pmc-core 470 CALL pmc_get_model_info( comm_world_nesting = comm_world_nesting, &471 cpl_id = cpl_id, cpl_parent_id = cpl_parent_id, &472 cpl_name = cpl_name, npe_total = cpl_npe_total, &473 lower_left_x = lower_left_coord_x, &472 CALL pmc_get_model_info( comm_world_nesting = comm_world_nesting, & 473 cpl_id = cpl_id, cpl_parent_id = cpl_parent_id, & 474 cpl_name = cpl_name, npe_total = cpl_npe_total, & 475 lower_left_x = lower_left_coord_x, & 474 476 lower_left_y = lower_left_coord_y ) 475 477 ! … … 513 515 CALL pmci_setup_coordinates 514 516 ! 
515 !-- Initialize the client (must be called before pmc_setup_server)
516 CALL pmci_setup_client
517 !
518 !-- Initialize PMC Server
519 CALL pmci_setup_server
520 !
521 !-- Check for mismatches between settings of master and client variables
522 !-- (e.g., all clients have to follow the end_time settings of the root master)
517 !-- Initialize the child (must be called before pmc_setup_parent)
518 CALL pmci_setup_child
519 !
520 !-- Initialize PMC parent
521 CALL pmci_setup_parent
522 !
523 !-- Check for mismatches between settings of master and child variables
524 !-- (e.g., all children have to follow the end_time settings of the root master)
523 525 CALL pmci_check_setting_mismatches
524 526
…
529 531
530 532
531 SUBROUTINE pmci_setup_server
533 SUBROUTINE pmci_setup_parent
532 534
533 535 #if defined( __parallel )
…
536 538 CHARACTER(LEN=32) :: myname
537 539
538 INTEGER(iwp) :: client_id !:
540 INTEGER(iwp) :: child_id !:
539 541 INTEGER(iwp) :: ierr !:
540 542 INTEGER(iwp) :: i !:
…
558 560 REAL(wp) :: dx_cl !:
559 561 REAL(wp) :: dy_cl !:
562 REAL(wp) :: left_limit !:
563 REAL(wp) :: north_limit !:
564 REAL(wp) :: right_limit !:
565 REAL(wp) :: south_limit !:
560 566 REAL(wp) :: xez !:
561 567 REAL(wp) :: yez !:
…
568 574
569 575 !
570 ! Initialize the pmc server
571 CALL pmc_serverinit
576 ! Initialize the pmc parent
577 CALL pmc_parentinit
572 578
573 579 !
574 580 !-- Corners of all children of the present parent
575 IF ( ( SIZE( pmc_server_for_client ) - 1 > 0 ) .AND. myid == 0 ) THEN
576 ALLOCATE( ch_xl(1:SIZE( pmc_server_for_client ) - 1) )
577 ALLOCATE( ch_xr(1:SIZE( pmc_server_for_client ) - 1) )
578 ALLOCATE( ch_ys(1:SIZE( pmc_server_for_client ) - 1) )
579 ALLOCATE( ch_yn(1:SIZE( pmc_server_for_client ) - 1) )
581 IF ( ( SIZE( pmc_parent_for_child ) - 1 > 0 ) .AND. myid == 0 ) THEN
582 ALLOCATE( ch_xl(1:SIZE( pmc_parent_for_child ) - 1) )
583 ALLOCATE( ch_xr(1:SIZE( pmc_parent_for_child ) - 1) )
584 ALLOCATE( ch_ys(1:SIZE( pmc_parent_for_child ) - 1) )
585 ALLOCATE( ch_yn(1:SIZE( pmc_parent_for_child ) - 1) )
580 586 ENDIF
581 587
582 588 !
583 589 !-- Get coordinates from all children
584 DO m = 1, SIZE( pmc_server_for_client ) - 1
585
586 client_id = pmc_server_for_client(m)
590 DO m = 1, SIZE( pmc_parent_for_child ) - 1
591
592 child_id = pmc_parent_for_child(m)
587 593 IF ( myid == 0 ) THEN
588 594
589 CALL pmc_recv_from_client( client_id, val, size(val), 0, 123, ierr )
590 CALL pmc_recv_from_client( client_id, fval, size(fval), 0, 124, ierr )
595 CALL pmc_recv_from_child( child_id, val, size(val), 0, 123, ierr )
596 CALL pmc_recv_from_child( child_id, fval, size(fval), 0, 124, ierr )
591 597
592 598 nx_cl = val(1)
…
598 604
599 605 !
600 !-- Find the highest client level in the coarse grid for the reduced z
606 !-- Find the highest nest level in the coarse grid for the reduced z
601 607 !-- transfer
602 608 DO k = 1, nz
…
612 618 ALLOCATE( cl_coord_y(-nbgp:ny_cl+nbgp) )
613 619
614 CALL pmc_recv_from_client( client_id, cl_coord_x, SIZE( cl_coord_x ), &
620 CALL pmc_recv_from_child( child_id, cl_coord_x, SIZE( cl_coord_x ), &
615 621 0, 11, ierr )
616 CALL pmc_recv_from_client( client_id, cl_coord_y, SIZE( cl_coord_y ), &
622 CALL pmc_recv_from_child( child_id, cl_coord_y, SIZE( cl_coord_y ), &
617 623 0, 12, ierr )
618 ! WRITE ( 0, * ) 'receive from pmc Client ', client_id, nx_cl, ny_cl
624 ! WRITE ( 0, * ) 'receive from pmc child ', child_id, nx_cl, ny_cl
619 625
620 626 define_coarse_grid_real(1) = lower_left_coord_x
621 627
…
631 637
632 638 !
633 !-- Check that the client domain is completely inside the server domain.
639 !-- Check that the child domain matches parent domain.
634 640 nomatch = 0
635 xez = ( nbgp + 1 ) * dx
636 yez = ( nbgp + 1 ) * dy
637 IF ( ( cl_coord_x(0) < define_coarse_grid_real(1) + xez ) .OR. &
638 ( cl_coord_x(nx_cl+1) > define_coarse_grid_real(5) - xez ) .OR. &
639 ( cl_coord_y(0) < define_coarse_grid_real(2) + yez ) .OR. &
640 ( cl_coord_y(ny_cl+1) > define_coarse_grid_real(6) - yez ) ) &
641 THEN
642 nomatch = 1
641 IF ( nesting_mode == 'vertical' ) THEN
642 right_limit = define_coarse_grid_real(5)
643 north_limit = define_coarse_grid_real(6)
644 IF ( ( cl_coord_x(nx_cl+1) /= right_limit ) .OR. &
645 ( cl_coord_y(ny_cl+1) /= north_limit ) ) THEN
646 nomatch = 1
647 ENDIF
648 ELSE
649
650 !
651 !-- Check that the children domain is completely inside the parent domain.
652 xez = ( nbgp + 1 ) * dx
653 yez = ( nbgp + 1 ) * dy
654 left_limit = lower_left_coord_x + xez
655 right_limit = define_coarse_grid_real(5) - xez
656 south_limit = lower_left_coord_y + yez
657 north_limit = define_coarse_grid_real(6) - yez
658 IF ( ( cl_coord_x(0) < left_limit ) .OR. &
659 ( cl_coord_x(nx_cl+1) > right_limit ) .OR. &
660 ( cl_coord_y(0) < south_limit ) .OR. &
661 ( cl_coord_y(ny_cl+1) > north_limit ) ) THEN
662 nomatch = 1
663 ENDIF
643 664 ENDIF
644 665
…
646 667 !-- Check that parallel nest domains, if any, do not overlap.
647 668 nest_overlap = 0
648 IF ( SIZE( pmc_server_for_client ) - 1 > 0 ) THEN
669 IF ( SIZE( pmc_parent_for_child ) - 1 > 0 ) THEN
649 670 ch_xl(m) = cl_coord_x(-nbgp)
650 671 ch_xr(m) = cl_coord_x(nx_cl+nbgp)
…
654 675 IF ( m > 1 ) THEN
655 676 DO mm = 1, m-1
656 IF ( ( ch_xl(m) < ch_xr(mm) .OR. ch_xr(m) > ch_xl(mm) ) .AND. &
657 ( ch_ys(m) < ch_yn(mm) .OR. ch_yn(m) > ch_ys(mm) ) ) THEN
677 IF ( ( ch_xl(m) < ch_xr(mm) .OR. &
678 ch_xr(m) > ch_xl(mm) ) .AND. &
679 ( ch_ys(m) < ch_yn(mm) .OR. &
680 ch_yn(m) > ch_ys(mm) ) ) THEN
658 681 nest_overlap = 1
659 682 ENDIF
…
667 690 !
668 691 !-- Send coarse grid information to child
669 CALL pmc_send_to_client( client_id, define_coarse_grid_real, &
670 SIZE( define_coarse_grid_real ), 0, 21, &
692 CALL pmc_send_to_child( child_id, define_coarse_grid_real, &
693 SIZE( define_coarse_grid_real ), 0, 21, &
671 694 ierr )
672 CALL pmc_send_to_client( client_id, define_coarse_grid_int, 3, 0, &
695 CALL pmc_send_to_child( child_id, define_coarse_grid_int, 3, 0, &
673 696 22, ierr )
674 697
675 698 !
676 699 !-- Send local grid to child
677 CALL pmc_send_to_client( client_id, coord_x, nx+1+2*nbgp, 0, 24, &
700 CALL pmc_send_to_child( child_id, coord_x, nx+1+2*nbgp, 0, 24, &
678 701 ierr )
679 CALL pmc_send_to_client( client_id, coord_y, ny+1+2*nbgp, 0, 25, &
702 CALL pmc_send_to_child( child_id, coord_y, ny+1+2*nbgp, 0, 25, &
680 703 ierr )
681 704
682 705 !
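!-- (A worked sketch of the containment check above; the numbers are
!-- hypothetical and not from any test case.) With nbgp = 3 and
!-- dx = dy = 50.0_wp the protective margin is xez = yez = ( 3 + 1 ) * 50
!-- = 200 m, so a child whose cl_coord_x(0) lies only 150 m from the parent
!-- origin violates left_limit and sets nomatch = 1. In 'vertical' mode, by
!-- contrast, the outer child edges must coincide exactly with
!-- right_limit/north_limit, because the lateral boundaries are cyclic.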
683 706 !-- Also send the dzu-, dzw-, zu- and zw-arrays here
684 CALL pmc_send_to_client( client_id, dzu, nz_cl+1, 0, 26, ierr )
685 CALL pmc_send_to_client( client_id, dzw, nz_cl+1, 0, 27, ierr )
686 CALL pmc_send_to_client( client_id, zu, nz_cl+2, 0, 28, ierr )
687 CALL pmc_send_to_client( client_id, zw, nz_cl+2, 0, 29, ierr )
707 CALL pmc_send_to_child( child_id, dzu, nz_cl+1, 0, 26, ierr )
708 CALL pmc_send_to_child( child_id, dzw, nz_cl+1, 0, 27, ierr )
709 CALL pmc_send_to_child( child_id, zu, nz_cl+2, 0, 28, ierr )
710 CALL pmc_send_to_child( child_id, zw, nz_cl+2, 0, 29, ierr )
688 711
689 712 ENDIF
…
691 714 CALL MPI_BCAST( nomatch, 1, MPI_INTEGER, 0, comm2d, ierr )
692 715 IF ( nomatch /= 0 ) THEN
693 WRITE ( message_string, * ) 'Error: nested child domain does ', &
716 WRITE ( message_string, * ) 'Error: nested child domain does ', &
694 717 'not fit into its parent domain'
695 CALL message( 'pmc_palm_setup_server', 'PA0425', 3, 2, 0, 6, 0 )
718 CALL message( 'pmci_setup_parent', 'PA0425', 3, 2, 0, 6, 0 )
696 719 ENDIF
697 720
…
700 723 WRITE ( message_string, * ) 'Nested parallel child ', &
701 724 'domains overlap'
702 CALL message( 'pmc_palm_setup_server', 'PA0426', 3, 2, 0, 6, 0 )
725 CALL message( 'pmci_setup_parent', 'PA0426', 3, 2, 0, 6, 0 )
703 726 ENDIF
704 727
…
710 733
711 734 !
712 !-- Include couple arrays into server content
735 !-- Include couple arrays into parent content
713 736 !-- TO_DO: Klaus: please give a more meaningful comment
714 737 CALL pmc_s_clear_next_array_list
715 DO WHILE ( pmc_s_getnextarray( client_id, myname ) )
716 CALL pmci_set_array_pointer( myname, client_id = client_id, &
738 DO WHILE ( pmc_s_getnextarray( child_id, myname ) )
739 CALL pmci_set_array_pointer( myname, child_id = child_id, &
717 740 nz_cl = nz_cl )
718 741 ENDDO
719 CALL pmc_s_setind_and_allocmem( client_id )
742 CALL pmc_s_setind_and_allocmem( child_id )
720 743 ENDDO
721 744
722 IF ( ( SIZE( pmc_server_for_client ) - 1 > 0 ) .AND. myid == 0 ) THEN
745 IF ( ( SIZE( pmc_parent_for_child ) - 1 > 0 ) .AND. myid == 0 ) THEN
723 746 DEALLOCATE( ch_xl )
724 747 DEALLOCATE( ch_xr )
…
747 770 INTEGER(iwp) :: px !:
748 771 INTEGER(iwp) :: py !:
749 INTEGER(iwp) :: server_pe !:
772 INTEGER(iwp) :: parent_pe !:
750 773
751 774 INTEGER(iwp), DIMENSION(2) :: scoord !:
…
757 780 IF ( myid == 0 ) THEN
758 781 !-- TO_DO: Klaus: give more specific comment what size_of_array stands for
759 CALL pmc_recv_from_client( client_id, size_of_array, 2, 0, 40, ierr )
782 CALL pmc_recv_from_child( child_id, size_of_array, 2, 0, 40, ierr )
760 783 ALLOCATE( coarse_bound_all(size_of_array(1),size_of_array(2)) )
761 CALL pmc_recv_from_client( client_id, coarse_bound_all, &
784 CALL pmc_recv_from_child( child_id, coarse_bound_all, &
762 785 SIZE( coarse_bound_all ), 0, 41, ierr )
763 786
764 787 !
…
783 806 ic = 0
784 807 !
785 !-- Loop over all client PEs
808 !-- Loop over all children PEs
786 809 DO k = 1, size_of_array(2)
787 810 !
788 !-- Area along y required by actual client PE
811 !-- Area along y required by actual child PE
789 812 DO j = coarse_bound_all(3,k), coarse_bound_all(4,k)
790 813 !
791 !-- Area along x required by actual client PE
814 !-- Area along x required by actual child PE
792 815 DO i = coarse_bound_all(1,k), coarse_bound_all(2,k)
793 816
794 817
…
796 819 scoord(1) = px
797 820 scoord(2) = py
798 CALL MPI_CART_RANK( comm2d, scoord, server_pe, ierr )
821 CALL MPI_CART_RANK( comm2d, scoord, parent_pe, ierr )
799 822
800 823 ic = ic + 1
801 824 !
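!-- (Mapping sketch with invented numbers, assuming px = i / nrx and
!-- py = j / nry as computed in the elided lines above.) With nrx = 32 and
!-- nry = 16, the coarse-grid point (i,j) = (70,40) belongs to the parent
!-- process at cartesian coordinates (px,py) = (2,2), and MPI_CART_RANK
!-- translates that coordinate pair into the rank finally stored in
!-- index_list(6,ic) below.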
802 !-- First index in server array
825 !-- First index in parent array
803 826 index_list(1,ic) = i - ( px * nrx ) + 1 + nbgp
804 827 !
805 !-- Second index in server array
828 !-- Second index in parent array
806 829 index_list(2,ic) = j - ( py * nry ) + 1 + nbgp
807 830 !
808 !-- x index of client coarse grid
831 !-- x index of child coarse grid
809 832 index_list(3,ic) = i - coarse_bound_all(1,k) + 1
810 833 !
811 !-- y index of client coarse grid
834 !-- y index of child coarse grid
812 835 index_list(4,ic) = j - coarse_bound_all(3,k) + 1
813 836 !
814 !-- PE number of client
837 !-- PE number of child
815 838 index_list(5,ic) = k - 1
816 839 !
817 !-- PE number of server
840 !-- PE number of parent
818 index_list(6,ic) = server_pe
841 index_list(6,ic) = parent_pe
819 842
820 843 ENDDO
…
823 846 !
824 847 !-- TO_DO: Klaus: comment what is done here
825 CALL pmc_s_set_2d_index_list( client_id, index_list(:,1:ic) )
848 CALL pmc_s_set_2d_index_list( child_id, index_list(:,1:ic) )
826 849
827 850 ELSE
…
829 852 !-- TO_DO: Klaus: comment why this dummy allocation is required
830 853 ALLOCATE( index_list(6,1) )
831 CALL pmc_s_set_2d_index_list( client_id, index_list )
854 CALL pmc_s_set_2d_index_list( child_id, index_list )
832 855 ENDIF
…
837 860
838 861 #endif
839 END SUBROUTINE pmci_setup_server
862 END SUBROUTINE pmci_setup_parent
840 863
841 864
842 865
843 SUBROUTINE pmci_setup_client
866 SUBROUTINE pmci_setup_child
844 867
845 868 #if defined( __parallel )
…
867 890 !
868 891 !-- TO_DO: describe what is happening in this if-clause
869 !-- Root Model does not have Server and is not a client
892 !-- Root model does not have a parent and is not a child
870 893 IF ( .NOT. pmc_is_rootmodel() ) THEN
871 894
872 CALL pmc_clientinit
895 CALL pmc_childinit
873 896 !
874 897 !-- Here AND ONLY HERE the arrays are defined, which actualy will be
875 !-- exchanged between client and server.
898 !-- exchanged between child and parent.
876 899 !-- If a variable is removed, it only has to be removed from here.
877 900 !-- Please check, if the arrays are in the list of POSSIBLE exchange arrays
878 901 !-- in subroutines:
879 !-- pmci_set_array_pointer (for server arrays)
902 !-- pmci_set_array_pointer (for parent arrays)
880 !-- pmci_create_client_arrays (for client arrays)
903 !-- pmci_create_child_arrays (for child arrays)
881 904 CALL pmc_set_dataarray_name( 'coarse', 'u' ,'fine', 'u', ierr )
882 905 CALL pmc_set_dataarray_name( 'coarse', 'v' ,'fine', 'v', ierr )
…
893 916
894 917 !
895 !-- Send grid to server
918 !-- Send grid to parent
896 919 val(1) = nx
897 920 val(2) = ny
…
903 926 IF ( myid == 0 ) THEN
904 927
905 CALL pmc_send_to_server( val, SIZE( val ), 0, 123, ierr )
906 CALL pmc_send_to_server( fval, SIZE( fval ), 0, 124, ierr )
907 CALL pmc_send_to_server( coord_x, nx + 1 + 2 * nbgp, 0, 11, ierr )
908 CALL pmc_send_to_server( coord_y, ny + 1 + 2 * nbgp, 0, 12, ierr )
928 CALL pmc_send_to_parent( val, SIZE( val ), 0, 123, ierr )
929 CALL pmc_send_to_parent( fval, SIZE( fval ), 0, 124, ierr )
930 CALL pmc_send_to_parent( coord_x, nx + 1 + 2 * nbgp, 0, 11, ierr )
931 CALL pmc_send_to_parent( coord_y, ny + 1 + 2 * nbgp, 0, 12, ierr )
909 932
910 933 !
911 934 !-- Receive Coarse grid information.
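!-- (Orientation note, pieced together from pmci_setup_parent above; the
!-- remaining slots are not visible in this hunk.) define_coarse_grid_real(1)
!-- and (2) carry the parent lower-left x/y coordinates and (5) and (6) the
!-- parent right/north domain edges, while define_coarse_grid_int holds
!-- three grid sizes, presumably nx, ny and the reduced nz_cl.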
912 935 !-- TO_DO: find shorter and more meaningful name for define_coarse_grid_real
913 CALL pmc_recv_from_server( define_coarse_grid_real, &
936 CALL pmc_recv_from_parent( define_coarse_grid_real, &
914 937 SIZE(define_coarse_grid_real), 0, 21, ierr )
915 CALL pmc_recv_from_server( define_coarse_grid_int, 3, 0, 22, ierr )
938 CALL pmc_recv_from_parent( define_coarse_grid_int, 3, 0, 22, ierr )
916 939 !
917 940 !-- Debug-printouts - keep them
918 ! WRITE(0,*) 'Coarse grid from Server'
941 ! WRITE(0,*) 'Coarse grid from parent '
919 942 ! WRITE(0,*) 'startx_tot = ',define_coarse_grid_real(1)
920 943 ! WRITE(0,*) 'starty_tot = ',define_coarse_grid_real(2)
…
929 952 ENDIF
930 953
931 CALL MPI_BCAST( define_coarse_grid_real, SIZE(define_coarse_grid_real), &
954 CALL MPI_BCAST( define_coarse_grid_real, SIZE(define_coarse_grid_real), &
932 955 MPI_REAL, 0, comm2d, ierr )
933 956 CALL MPI_BCAST( define_coarse_grid_int, 3, MPI_INTEGER, 0, comm2d, ierr )
…
941 964
942 965 !
943 !-- Get server coordinates on coarse grid
966 !-- Get parent coordinates on coarse grid
944 967 ALLOCATE( cg%coord_x(-nbgp:cg%nx+nbgp) )
945 968 ALLOCATE( cg%coord_y(-nbgp:cg%ny+nbgp) )
…
951 974
952 975 !
953 !-- Get coarse grid coordinates and vales of the z-direction from server
976 !-- Get coarse grid coordinates and values of the z-direction from the parent
954 977 IF ( myid == 0) THEN
955 978
956 CALL pmc_recv_from_server( cg%coord_x, cg%nx+1+2*nbgp, 0, 24, ierr )
979 CALL pmc_recv_from_parent( cg%coord_x, cg%nx+1+2*nbgp, 0, 24, ierr )
957 CALL pmc_recv_from_server( cg%coord_y, cg%ny+1+2*nbgp, 0, 25, ierr )
980 CALL pmc_recv_from_parent( cg%coord_y, cg%ny+1+2*nbgp, 0, 25, ierr )
958 CALL pmc_recv_from_server( cg%dzu, cg%nz + 1, 0, 26, ierr )
981 CALL pmc_recv_from_parent( cg%dzu, cg%nz + 1, 0, 26, ierr )
959 CALL pmc_recv_from_server( cg%dzw, cg%nz + 1, 0, 27, ierr )
982 CALL pmc_recv_from_parent( cg%dzw, cg%nz + 1, 0, 27, ierr )
960 CALL pmc_recv_from_server( cg%zu, cg%nz + 2, 0, 28, ierr )
983 CALL pmc_recv_from_parent( cg%zu, cg%nz + 2, 0, 28, ierr )
961 CALL pmc_recv_from_server( cg%zw, cg%nz + 2, 0, 29, ierr )
984 CALL pmc_recv_from_parent( cg%zw, cg%nz + 2, 0, 29, ierr )
962 985
963 986 ENDIF
…
980 1003
981 1004 !
982 !-- Include couple arrays into client content
1005 !-- Include couple arrays into child content
983 !-- TO_DO: Klaus: better explain the above comment (what is client content?)
1006 !-- TO_DO: Klaus: better explain the above comment (what is child content?)
984 1007 CALL pmc_c_clear_next_array_list
985 1008 DO WHILE ( pmc_c_getnextarray( myname ) )
986 !-- TO_DO: Klaus, why the c-arrays are still up to cg%nz??
987 CALL pmci_create_client_arrays ( myname, icl, icr, jcs, jcn, cg%nz )
1009 !-- TO_DO: Klaus, why the child-arrays are still up to cg%nz??
1010 CALL pmci_create_child_arrays ( myname, icl, icr, jcs, jcn, cg%nz )
988 1011 ENDDO
989 1012 CALL pmc_c_setind_and_allocmem
990 1013
991 1014 !
992 !-- Precompute interpolation coefficients and client-array indices
1015 !-- Precompute interpolation coefficients and child-array indices
993 1016 CALL pmci_init_interp_tril
994 1017
…
1002 1025
1003 1026 !
1004 !-- Two-way coupling.
1027 !-- Two-way coupling for general and vertical nesting.
1005 1028 !-- Precompute the index arrays and relaxation functions for the
1006 1029 !-- anterpolation
1007 IF ( nesting_mode == 'two-way' ) THEN
1030 IF ( TRIM( nesting_mode ) == 'two-way' .OR. &
1031 nesting_mode == 'vertical' ) THEN
1008 1032 CALL pmci_init_anterp_tophat
1009 1033 ENDIF
…
1097 1121 !-- Note that MPI_Gather receives data from all processes in the rank order
1098 1122 !-- TO_DO: refer to the line where this fact becomes important
1099 CALL MPI_GATHER( coarse_bound, 5, MPI_INTEGER, coarse_bound_all, 5, &
1123 CALL MPI_GATHER( coarse_bound, 5, MPI_INTEGER, coarse_bound_all, 5, &
1100 1124 MPI_INTEGER, 0, comm2d, ierr )
…
1103 1127 size_of_array(1) = SIZE( coarse_bound_all, 1 )
1104 1128 size_of_array(2) = SIZE( coarse_bound_all, 2 )
1105 CALL pmc_send_to_server( size_of_array, 2, 0, 40, ierr )
1106 CALL pmc_send_to_server( coarse_bound_all, SIZE( coarse_bound_all ), &
1129 CALL pmc_send_to_parent( size_of_array, 2, 0, 40, ierr )
1130 CALL pmc_send_to_parent( coarse_bound_all, SIZE( coarse_bound_all ), &
1107 1131 0, 41, ierr )
1108 1132 ENDIF
…
1114 1138 SUBROUTINE pmci_init_interp_tril
1115 1139 !
1116 !-- Precomputation of the interpolation coefficients and client-array indices
1140 !-- Precomputation of the interpolation coefficients and child-array indices
1117 1141 !-- to be used by the interpolation routines interp_tril_lr, interp_tril_ns
1118 1142 !-- and interp_tril_t.
…
1167 1191 !
1168 1192 !-- Note that the node coordinates xfs... and xcs... are relative to the
1169 !-- lower-left-bottom corner of the fc-array, not the actual client domain
1193 !-- lower-left-bottom corner of the fc-array, not the actual child domain
1170 1194 !-- corner
1171 1195 DO i = nxlg, nxrg
…
1262 1286 DO i = nxl-1, nxl
1263 1287 DO j = nys, nyn
1264 nzt_topo_nestbc_l = MAX( nzt_topo_nestbc_l, nzb_u_inner(j,i), &
1288 nzt_topo_nestbc_l = MAX( nzt_topo_nestbc_l, nzb_u_inner(j,i), &
1265 1289 nzb_v_inner(j,i), nzb_w_inner(j,i) )
1266 1290 ENDDO
…
1273 1297 i = nxr + 1
1274 1298 DO j = nys, nyn
1275 nzt_topo_nestbc_r = MAX( nzt_topo_nestbc_r, nzb_u_inner(j,i), &
1299 nzt_topo_nestbc_r = MAX( nzt_topo_nestbc_r, nzb_u_inner(j,i), &
1276 1300 nzb_v_inner(j,i), nzb_w_inner(j,i) )
1277 1301 ENDDO
…
1283 1307 DO j = nys-1, nys
1284 1308 DO i = nxl, nxr
1285 nzt_topo_nestbc_s = MAX( nzt_topo_nestbc_s, nzb_u_inner(j,i), &
1309 nzt_topo_nestbc_s = MAX( nzt_topo_nestbc_s, nzb_u_inner(j,i), &
1286 1310 nzb_v_inner(j,i), nzb_w_inner(j,i) )
1287 1311 ENDDO
…
1294 1318 j = nyn + 1
1295 1319 DO i = nxl, nxr
1296 nzt_topo_nestbc_n = MAX( nzt_topo_nestbc_n, nzb_u_inner(j,i), &
1320 nzt_topo_nestbc_n = MAX( nzt_topo_nestbc_n, nzb_u_inner(j,i), &
1297 1321 nzb_v_inner(j,i), nzb_w_inner(j,i) )
1298 1322 ENDDO
…
1303 1327 !-- Then determine the maximum number of near-wall nodes per wall point based
1304 1328 !-- on the grid-spacing ratios.
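!-- (Formula sketch for orientation.) The correction factors filled in
!-- below follow, schematically,
!--    lcr(alcorr) = LOG( dist / z0 ) / logvelc1
!-- with dist the wall distance of the fine-grid node and logvelc1 the
!-- log-profile value at the pivot node lc, i.e. each near-wall value
!-- interpolated from the coarse grid is rescaled onto a logarithmic
!-- profile anchored at the first node the parent grid can resolve.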
1305 nzt_topo_max = MAX( nzt_topo_nestbc_l, nzt_topo_nestbc_r, &
1329 nzt_topo_max = MAX( nzt_topo_nestbc_l, nzt_topo_nestbc_r, &
1306 1330 nzt_topo_nestbc_s, nzt_topo_nestbc_n )
…
1343 1367 k = kb + 1
1344 1368 wall_index = kb
1345 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, &
1369 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, &
1346 1370 inc, wall_index, z0(j,i), kb, direction, ncorr )
1347 1371 logc_u_l(1,k,j) = lc
…
1354 1378 k = kb + 1
1355 1379 wall_index = kb
1356 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, &
1380 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, &
1357 1381 inc, wall_index, z0(j,i), kb, direction, ncorr )
1358 1382 logc_v_l(1,k,j) = lc
…
1385 1409 k = kb + 1
1386 1410 wall_index = kb
1387 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, &
1411 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, &
1388 1412 inc, wall_index, z0(j,i), kb, direction, ncorr )
1389 1413 logc_u_r(1,k,j) = lc
…
1396 1420 k = kb + 1
1397 1421 wall_index = kb
1398 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, &
1422 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, &
1399 1423 inc, wall_index, z0(j,i), kb, direction, ncorr )
1400 1424 logc_v_r(1,k,j) = lc
…
1428 1452 k = kb + 1
1429 1453 wall_index = kb
1430 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, &
1454 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, &
1431 1455 inc, wall_index, z0(j,i), kb, direction, ncorr )
1432 1456 logc_u_s(1,k,i) = lc
…
1439 1463 k = kb + 1
1440 1464 wall_index = kb
1441 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, &
1465 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, &
1442 1466 inc, wall_index, z0(j,i), kb, direction, ncorr )
1443 1467 logc_v_s(1,k,i) = lc
…
1471 1495 k = kb + 1
1472 1496 wall_index = kb
1473 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, &
1497 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, &
1474 1498 inc, wall_index, z0(j,i), kb, direction, ncorr )
1475 1499 logc_u_n(1,k,i) = lc
…
1482 1506 k = kb + 1
1483 1507 wall_index = kb
1484 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, &
1508 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, &
1485 1509 inc, wall_index, z0(j,i), kb, direction, ncorr )
1486 1510 logc_v_n(1,k,i) = lc
…
1502 1526
1503 1527 ALLOCATE( logc_w_l(1:2,nzb:nzt_topo_nestbc_l,nys:nyn) )
1504 ALLOCATE( logc_ratio_w_l(1:2,0:ncorr-1,nzb:nzt_topo_nestbc_l, &
1528 ALLOCATE( logc_ratio_w_l(1:2,0:ncorr-1,nzb:nzt_topo_nestbc_l, &
1505 1529 nys:nyn) )
1506 1530
…
1513 1537 !-- Wall for u on the south side, but not on the north side
1514 1538 i = 0
1515 IF ( ( nzb_u_outer(j,i) > nzb_u_outer(j+1,i) ) .AND. &
1516 ( nzb_u_outer(j,i) == nzb_u_outer(j-1,i) ) ) &
1539 IF ( ( nzb_u_outer(j,i) > nzb_u_outer(j+1,i) ) .AND. &
1540 ( nzb_u_outer(j,i) == nzb_u_outer(j-1,i) ) ) &
1517 1541 THEN
1518 1542 inc = 1
1519 1543 wall_index = j
1520 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1544 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1521 1545 k, j, inc, wall_index, z0(j,i), kb, direction, ncorr )
1522 1546 !
…
1531 1555 !-- Wall for u on the north side, but not on the south side
1532 1556 i = 0
1533 IF ( ( nzb_u_outer(j,i) > nzb_u_outer(j-1,i) ) .AND. &
1557 IF ( ( nzb_u_outer(j,i) > nzb_u_outer(j-1,i) ) .AND. &
1534 1558 ( nzb_u_outer(j,i) == nzb_u_outer(j+1,i) ) ) THEN
1535 1559 inc = -1
1536 1560 wall_index = j + 1
1537 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1561 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1538 1562 k, j, inc, wall_index, z0(j,i), kb, direction, ncorr )
1539 1563 !
…
1548 1572 !-- Wall for w on the south side, but not on the north side.
1549 1573 i = -1
1550 IF ( ( nzb_w_outer(j,i) > nzb_w_outer(j+1,i) ) .AND. &
1574 IF ( ( nzb_w_outer(j,i) > nzb_w_outer(j+1,i) ) .AND. &
1551 1575 ( nzb_w_outer(j,i) == nzb_w_outer(j-1,i) ) ) THEN
1552 1576 inc = 1
1553 1577 wall_index = j
1554 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1578 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1555 1579 k, j, inc, wall_index, z0(j,i), kb, direction, ncorr )
1556 1580 !
…
1565 1589 !-- Wall for w on the north side, but not on the south side.
1566 1590 i = -1
1567 IF ( ( nzb_w_outer(j,i) > nzb_w_outer(j-1,i) ) .AND. &
1591 IF ( ( nzb_w_outer(j,i) > nzb_w_outer(j-1,i) ) .AND. &
1568 1592 ( nzb_w_outer(j,i) == nzb_w_outer(j+1,i) ) ) THEN
1569 1593 inc = -1
1570 1594 wall_index = j+1
1571 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1595 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1572 1596 k, j, inc, wall_index, z0(j,i), kb, direction, ncorr )
1573 1597 !
…
1589 1613
1590 1614 ALLOCATE( logc_w_r(1:2,nzb:nzt_topo_nestbc_r,nys:nyn) )
1591 ALLOCATE( logc_ratio_w_r(1:2,0:ncorr-1,nzb:nzt_topo_nestbc_r, &
1615 ALLOCATE( logc_ratio_w_r(1:2,0:ncorr-1,nzb:nzt_topo_nestbc_r, &
1592 1616 nys:nyn) )
1593 1617 logc_w_r = 0
…
1600 1624 !
1601 1625 !-- Wall for u on the south side, but not on the north side
1602 IF ( ( nzb_u_outer(j,i) > nzb_u_outer(j+1,i) ) .AND. &
1626 IF ( ( nzb_u_outer(j,i) > nzb_u_outer(j+1,i) ) .AND. &
1603 1627 ( nzb_u_outer(j,i) == nzb_u_outer(j-1,i) ) ) THEN
1604 1628 inc = 1
1605 1629 wall_index = j
1606 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1630 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1607 1631 k, j, inc, wall_index, z0(j,i), kb, direction, ncorr )
1608 1632 !
…
1616 1640 !
1617 1641 !-- Wall for u on the north side, but not on the south side
1618 IF ( ( nzb_u_outer(j,i) > nzb_u_outer(j-1,i) ) .AND. &
1642 IF ( ( nzb_u_outer(j,i) > nzb_u_outer(j-1,i) ) .AND. &
1619 1643 ( nzb_u_outer(j,i) == nzb_u_outer(j+1,i) ) ) THEN
1620 1644 inc = -1
1621 1645 wall_index = j+1
1622 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1646 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1623 1647 k, j, inc, wall_index, z0(j,i), kb, direction, ncorr )
1624 1648 !
…
1632 1656 !
1633 1657 !-- Wall for w on the south side, but not on the north side
1634 IF ( ( nzb_w_outer(j,i) > nzb_w_outer(j+1,i) ) .AND. &
1658 IF ( ( nzb_w_outer(j,i) > nzb_w_outer(j+1,i) ) .AND. &
1635 1659 ( nzb_w_outer(j,i) == nzb_w_outer(j-1,i) ) ) THEN
1636 1660 inc = 1
1637 1661 wall_index = j
1638 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1662 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1639 1663 k, j, inc, wall_index, z0(j,i), kb, direction, ncorr )
1640 1664 !
…
1648 1672 !
1649 1673 !-- Wall for w on the north side, but not on the south side
1650 IF ( ( nzb_w_outer(j,i) > nzb_w_outer(j-1,i) ) .AND. &
1674 IF ( ( nzb_w_outer(j,i) > nzb_w_outer(j-1,i) ) .AND. &
1651 1675 ( nzb_w_outer(j,i) == nzb_w_outer(j+1,i) ) ) THEN
1652 1676 inc = -1
1653 1677 wall_index = j+1
1654 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1678 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1655 1679 k, j, inc, wall_index, z0(j,i), kb, direction, ncorr )
1656 1680
…
1673 1697
1674 1698 ALLOCATE( logc_w_s(1:2,nzb:nzt_topo_nestbc_s,nxl:nxr) )
1675 ALLOCATE( logc_ratio_w_s(1:2,0:ncorr-1,nzb:nzt_topo_nestbc_s, &
1699 ALLOCATE( logc_ratio_w_s(1:2,0:ncorr-1,nzb:nzt_topo_nestbc_s, &
1676 1700 nxl:nxr) )
1677 1701 logc_w_s = 0
…
1684 1708 !-- Wall for v on the left side, but not on the right side
1685 1709 j = 0
1686 IF ( ( nzb_v_outer(j,i) > nzb_v_outer(j,i+1) ) .AND. &
1710 IF ( ( nzb_v_outer(j,i) > nzb_v_outer(j,i+1) ) .AND. &
1687 1711 ( nzb_v_outer(j,i) == nzb_v_outer(j,i-1) ) ) THEN
1688 1712 inc = 1
1689 1713 wall_index = i
1690 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1714 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1691 1715 k, i, inc, wall_index, z0(j,i), kb, direction, ncorr )
1692 1716 !
…
1701 1725 !-- Wall for v on the right side, but not on the left side
1702 1726 j = 0
1703 IF ( ( nzb_v_outer(j,i) > nzb_v_outer(j,i-1) ) .AND. &
1727 IF ( ( nzb_v_outer(j,i) > nzb_v_outer(j,i-1) ) .AND. &
1704 1728 ( nzb_v_outer(j,i) == nzb_v_outer(j,i+1) ) ) THEN
1705 1729 inc = -1
1706 1730 wall_index = i+1
1707 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1731 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1708 1732 k, i, inc, wall_index, z0(j,i), kb, direction, ncorr )
1709 1733 !
…
1718 1742 !-- Wall for w on the left side, but not on the right side
1719 1743 j = -1
1720 IF ( ( nzb_w_outer(j,i) > nzb_w_outer(j,i+1) ) .AND. &
1744 IF ( ( nzb_w_outer(j,i) > nzb_w_outer(j,i+1) ) .AND. &
1721 1745 ( nzb_w_outer(j,i) == nzb_w_outer(j,i-1) ) ) THEN
1722 1746 inc = 1
1723 1747 wall_index = i
1724 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1748 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1725 1749 k, i, inc, wall_index, z0(j,i), kb, direction, ncorr )
1726 1750 !
…
1735 1759 !-- Wall for w on the right side, but not on the left side
1736 1760 j = -1
1737 IF ( ( nzb_w_outer(j,i) > nzb_w_outer(j,i-1) ) .AND. &
1761 IF ( ( nzb_w_outer(j,i) > nzb_w_outer(j,i-1) ) .AND. &
1738 1762 ( nzb_w_outer(j,i) == nzb_w_outer(j,i+1) ) ) THEN
1739 1763 inc = -1
1740 1764 wall_index = i+1
1741 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1765 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1742 1766 k, i, inc, wall_index, z0(j,i), kb, direction, ncorr )
1743 1767 !
…
1759 1783
1760 1784 ALLOCATE( logc_w_n(1:2,nzb:nzt_topo_nestbc_n, nxl:nxr) )
1761 ALLOCATE( logc_ratio_w_n(1:2,0:ncorr-1,nzb:nzt_topo_nestbc_n, &
1785 ALLOCATE( logc_ratio_w_n(1:2,0:ncorr-1,nzb:nzt_topo_nestbc_n, &
1762 1786 nxl:nxr) )
1763 1787 logc_w_n = 0
…
1770 1794 !
1771 1795 !-- Wall for v on the left side, but not on the right side
1772 IF ( ( nzb_v_outer(j,i) > nzb_v_outer(j,i+1) ) .AND. &
1796 IF ( ( nzb_v_outer(j,i) > nzb_v_outer(j,i+1) ) .AND. &
1773 1797 ( nzb_v_outer(j,i) == nzb_v_outer(j,i-1) ) ) THEN
1774 1798 inc = 1
1775 1799 wall_index = i
1776 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1800 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1777 1801 k, i, inc, wall_index, z0(j,i), kb, direction, ncorr )
1778 1802 !
…
1786 1810 !
1787 1811 !-- Wall for v on the right side, but not on the left side
1788 IF ( ( nzb_v_outer(j,i) > nzb_v_outer(j,i-1) ) .AND. &
1812 IF ( ( nzb_v_outer(j,i) > nzb_v_outer(j,i-1) ) .AND. &
1789 1813 ( nzb_v_outer(j,i) == nzb_v_outer(j,i+1) ) ) THEN
1790 1814 inc = -1
1791 1815 wall_index = i + 1
1792 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1816 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1793 1817 k, i, inc, wall_index, z0(j,i), kb, direction, ncorr )
1794 1818 !
…
1802 1826 !
1803 1827 !-- Wall for w on the left side, but not on the right side
1804 IF ( ( nzb_w_outer(j,i) > nzb_w_outer(j,i+1) ) .AND. &
1828 IF ( ( nzb_w_outer(j,i) > nzb_w_outer(j,i+1) ) .AND. &
1805 1829 ( nzb_w_outer(j,i) == nzb_w_outer(j,i-1) ) ) THEN
1806 1830 inc = 1
1807 1831 wall_index = i
1808 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1832 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1809 1833 k, i, inc, wall_index, z0(j,i), kb, direction, ncorr )
1810 1834 !
…
1818 1842 !
1819 1843 !-- Wall for w on the right side, but not on the left side
1820 IF ( ( nzb_w_outer(j,i) > nzb_w_outer(j,i-1) ) .AND. &
1844 IF ( ( nzb_w_outer(j,i) > nzb_w_outer(j,i-1) ) .AND. &
1821 1845 ( nzb_w_outer(j,i) == nzb_w_outer(j,i+1) ) ) THEN
1822 1846 inc = -1
1823 1847 wall_index = i+1
1824 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1848 CALL pmci_define_loglaw_correction_parameters( lc, lcr, &
1825 1849 k, i, inc, wall_index, z0(j,i), kb, direction, ncorr )
1826 1850 !
…
1843 1867
1844 1868
1845 SUBROUTINE pmci_define_loglaw_correction_parameters( lc, lcr, k, ij, inc, &
1869 SUBROUTINE pmci_define_loglaw_correction_parameters( lc, lcr, k, ij, inc, &
1846 1870 wall_index, z0_l, kb, direction, ncorr )
1847 1871
…
1897 1921 corr_index = ij + lcorr ! In this case (direction = 2) ij is j
1898 1922 IF ( lcorr == 0 ) THEN
1899 CALL pmci_find_logc_pivot_j( lc, logvelc1, ij, wall_index, &
1923 CALL pmci_find_logc_pivot_j( lc, logvelc1, ij, wall_index, &
1900 1924 z0_l, inc )
1901 1925 ENDIF
…
1905 1929 !-- valid in both directions
1906 1930 IF ( inc * corr_index < inc * lc ) THEN
1907 lcr(alcorr) = LOG( ABS( coord_y(corr_index) + 0.5_wp * dy &
1908 - coord_y(wall_index) ) / z0_l ) &
1931 lcr(alcorr) = LOG( ABS( coord_y(corr_index) + 0.5_wp * dy &
1932 - coord_y(wall_index) ) / z0_l ) &
1909 1933 / logvelc1
1910 1934 more = .TRUE.
…
1924 1948 corr_index = ij + lcorr ! In this case (direction = 3) ij is i
1925 1949 IF ( lcorr == 0 ) THEN
1926 CALL pmci_find_logc_pivot_i( lc, logvelc1, ij, wall_index, &
1950 CALL pmci_find_logc_pivot_i( lc, logvelc1, ij, wall_index, &
1927 1951 z0_l, inc )
1928 1952 ENDIF
…
1931 1955 !-- valid in both directions
1932 1956 IF ( inc * corr_index < inc * lc ) THEN
1933 lcr(alcorr) = LOG( ABS( coord_x(corr_index) + 0.5_wp * dx &
1934 - coord_x(wall_index) ) / z0_l ) &
1957 lcr(alcorr) = LOG( ABS( coord_x(corr_index) + 0.5_wp * dx &
1958 - coord_x(wall_index) ) / z0_l ) &
1935 1959 / logvelc1
1936 1960 more = .TRUE.
…
1963 1987 INTEGER(iwp) :: k1 !:
1964 1988
1965 REAL(wp), INTENT(OUT) :: logzc1 !:
1989 REAL(wp), INTENT(OUT) :: logzc1 !:
1966 1990 REAL(wp), INTENT(IN) :: z0_l !:
1967 1991
…
2072 2096 SUBROUTINE pmci_init_anterp_tophat
2073 2097 !
2074 !-- Precomputation of the client-array indices for
2098 !-- Precomputation of the child-array indices for
2075 2099 !-- corresponding coarse-grid array index and the
2076 2100 !-- Under-relaxation coefficients to be used by anterp_tophat.
…
2148 2172 DO ii = icl, icr
2149 2173 i = istart
2150 DO WHILE ( ( coord_x(i) < cg%coord_x(ii) - 0.5_wp * cg%dx ) .AND. &
2174 DO WHILE ( ( coord_x(i) < cg%coord_x(ii) - 0.5_wp * cg%dx ) .AND. &
2151 2175 ( i < nxrg ) )
2152 2176 i = i + 1
2153 2177 ENDDO
2154 2178 iflu(ii) = MIN( MAX( i, nxlg ), nxrg )
2155 DO WHILE ( ( coord_x(i) < cg%coord_x(ii) + 0.5_wp * cg%dx ) .AND. &
2179 DO WHILE ( ( coord_x(i) < cg%coord_x(ii) + 0.5_wp * cg%dx ) .AND. &
2156 2180 ( i < nxrg ) )
2157 2181 i = i + 1
…
2166 2190 DO ii = icl, icr
2167 2191 i = istart
2168 DO WHILE ( ( coord_x(i) + 0.5_wp * dx < cg%coord_x(ii) ) .AND. &
2192 DO WHILE ( ( coord_x(i) + 0.5_wp * dx < cg%coord_x(ii) ) .AND. &
2169 2193 ( i < nxrg ) )
2170 2194 i = i + 1
2171 2195 ENDDO
2172 2196 iflo(ii) = MIN( MAX( i, nxlg ), nxrg )
2173 DO WHILE ( ( coord_x(i) + 0.5_wp * dx < cg%coord_x(ii) + cg%dx ) &
2197 DO WHILE ( ( coord_x(i) + 0.5_wp * dx < cg%coord_x(ii) + cg%dx ) &
2174 2198 .AND. ( i < nxrg ) )
2175 2199 i = i + 1
…
2184 2208 DO jj = jcs, jcn
2185 2209 j = jstart
2186 DO WHILE ( ( coord_y(j) < cg%coord_y(jj) - 0.5_wp * cg%dy ) .AND. &
2210 DO WHILE ( ( coord_y(j) < cg%coord_y(jj) - 0.5_wp * cg%dy ) .AND. &
2187 2211 ( j < nyng ) )
2188 2212 j = j + 1
2189 2213 ENDDO
2190 2214 jflv(jj) = MIN( MAX( j, nysg ), nyng )
2191 DO WHILE ( ( coord_y(j) < cg%coord_y(jj) + 0.5_wp * cg%dy ) .AND. &
2215 DO WHILE ( ( coord_y(j) < cg%coord_y(jj) + 0.5_wp * cg%dy ) .AND. &
2192 2216 ( j < nyng ) )
2193 2217 j = j + 1
…
2202 2226 DO jj = jcs, jcn
2203 2227 j = jstart
2204 DO WHILE ( ( coord_y(j) + 0.5_wp * dy < cg%coord_y(jj) ) .AND. &
2228 DO WHILE ( ( coord_y(j) + 0.5_wp * dy < cg%coord_y(jj) ) .AND. &
2205 2229 ( j < nyng ) )
2206 2230 j = j + 1
2207 2231 ENDDO
2208 2232 jflo(jj) = MIN( MAX( j, nysg ), nyng )
2209 DO WHILE ( ( coord_y(j) + 0.5_wp * dy < cg%coord_y(jj) + cg%dy ) &
2233 DO WHILE ( ( coord_y(j) + 0.5_wp * dy < cg%coord_y(jj) + cg%dy ) &
2210 2234 .AND. ( j < nyng ) )
2211 2235 j = j + 1
…
2222 2246 DO kk = 1, kctw
2223 2247 k = kstart
2224 DO WHILE ( ( zw(k) < cg%zw(kk) - 0.5_wp * cg%dzw(kk) ) .AND. &
2248 DO WHILE ( ( zw(k) < cg%zw(kk) - 0.5_wp * cg%dzw(kk) ) .AND. &
2225 2249 ( k < nzt ) )
2226 2250 k = k + 1
2227 2251 ENDDO
2228 2252 kflw(kk) = MIN( MAX( k, 1 ), nzt + 1 )
2229 DO WHILE ( ( zw(k) < cg%zw(kk) + 0.5_wp * cg%dzw(kk+1) ) .AND. &
2253 DO WHILE ( ( zw(k) < cg%zw(kk) + 0.5_wp * cg%dzw(kk+1) ) .AND. &
2230 2254 ( k < nzt ) )
2231 2255 k = k + 1
…
2242 2266 DO kk = 1, kctu
2243 2267 k = kstart
2244 DO WHILE ( ( zu(k) < cg%zu(kk) - 0.5_wp * cg%dzu(kk) ) .AND. &
2268 DO WHILE ( ( zu(k) < cg%zu(kk) - 0.5_wp * cg%dzu(kk) ) .AND. &
2245 2269 ( k < nzt ) )
2246 2270 k = k + 1
2247 2271 ENDDO
2248 2272 kflo(kk) = MIN( MAX( k, 1 ), nzt + 1 )
2249 DO WHILE ( ( zu(k) < cg%zu(kk) + 0.5_wp * cg%dzu(kk+1) ) .AND. &
2273 DO WHILE ( ( zu(k) < cg%zu(kk) + 0.5_wp * cg%dzu(kk+1) ) .AND. &
2250 2274 ( k < nzt ) )
2251 2275 k = k + 1
…
2272 2296 !-- Spatial under-relaxation coefficients
2273 2297 ALLOCATE( frax(icl:icr) )
2274
2275 DO ii = icl, icr
2276 IF ( nest_bound_l ) THEN
2277 xi = ( MAX( 0.0_wp, ( cg%coord_x(ii) - lower_left_coord_x ) ) / &
2278 anterp_relax_length_l )**4
2279 ELSEIF ( nest_bound_r ) THEN
2280 xi = ( MAX( 0.0_wp, ( lower_left_coord_x + ( nx + 1 ) * dx - &
2281 cg%coord_x(ii) ) ) / &
2282 anterp_relax_length_r )**4
2283 ELSE
2284 xi = 999999.9_wp
2285 ENDIF
2286 frax(ii) = xi / ( 1.0_wp + xi )
2287 ENDDO
2288
2289 2298 ALLOCATE( fray(jcs:jcn) )
2290
2291 DO jj = jcs, jcn
2292 IF ( nest_bound_s ) THEN
2293 eta = ( MAX( 0.0_wp, ( cg%coord_y(jj) - lower_left_coord_y ) ) / &
2294 anterp_relax_length_s )**4
2295 ELSEIF ( nest_bound_n ) THEN
2296 eta = ( MAX( 0.0_wp, ( lower_left_coord_y + ( ny + 1 ) * dy - &
2297 cg%coord_y(jj)) ) / &
2298 anterp_relax_length_n )**4
2299 ELSE
2300 eta = 999999.9_wp
2301 ENDIF
2302 fray(jj) = eta / ( 1.0_wp + eta )
2303 ENDDO
2299
2300 frax(icl:icr) = 1.0_wp
2301 fray(jcs:jcn) = 1.0_wp
2302
2303 IF ( nesting_mode /= 'vertical' ) THEN
2304 DO ii = icl, icr
2305 IF ( nest_bound_l ) THEN
2306 xi = ( MAX( 0.0_wp, ( cg%coord_x(ii) - &
2307 lower_left_coord_x ) ) / anterp_relax_length_l )**4
2308 ELSEIF ( nest_bound_r ) THEN
2309 xi = ( MAX( 0.0_wp, ( lower_left_coord_x + ( nx + 1 ) * dx - &
2310 cg%coord_x(ii) ) ) / &
2311 anterp_relax_length_r )**4
2312 ELSE
2313 xi = 999999.9_wp
2314 ENDIF
2315 frax(ii) = xi / ( 1.0_wp + xi )
2316 ENDDO
2317
2318
2319 DO jj = jcs, jcn
2320 IF ( nest_bound_s ) THEN
2321 eta = ( MAX( 0.0_wp, ( cg%coord_y(jj) - &
2322 lower_left_coord_y ) ) / anterp_relax_length_s )**4
2323 ELSEIF ( nest_bound_n ) THEN
2324 eta = ( MAX( 0.0_wp, ( lower_left_coord_y + ( ny + 1 ) * dy - &
2325 cg%coord_y(jj)) ) / &
2326 anterp_relax_length_n )**4
2327 ELSE
2328 eta = 999999.9_wp
2329 ENDIF
2330 fray(jj) = eta / ( 1.0_wp + eta )
2331 ENDDO
2332 ENDIF
2304 2333
2305 2334 ALLOCATE( fraz(0:kctu) )
…
2347 2376 height = zu(k) - zu(nzb_s_inner(j,i))
2348 2377 fw = EXP( -cfw * height / glsf )
2349 tkefactor_l(k,j) = c_tkef * ( fw0 * fw + ( 1.0_wp - fw ) * &
2378 tkefactor_l(k,j) = c_tkef * ( fw0 * fw + ( 1.0_wp - fw ) * &
2350 2379 ( glsf / glsc )**p23 )
2351 2380 ENDDO
…
2365 2394 height = zu(k) - zu(nzb_s_inner(j,i))
2366 2395 fw = EXP( -cfw * height / glsf )
2367 tkefactor_r(k,j) = c_tkef * ( fw0 * fw + ( 1.0_wp - fw ) * &
2396 tkefactor_r(k,j) = c_tkef * ( fw0 * fw + ( 1.0_wp - fw ) * &
2368 2397 ( glsf / glsc )**p23 )
2369 2398 ENDDO
…
2383 2412 height = zu(k) - zu(nzb_s_inner(j,i))
2384 2413 fw = EXP( -cfw*height / glsf )
2385 tkefactor_s(k,i) = c_tkef * ( fw0 * fw + ( 1.0_wp - fw ) * &
2414 tkefactor_s(k,i) = c_tkef * ( fw0 * fw + ( 1.0_wp - fw ) * &
2386 2415 ( glsf / glsc )**p23 )
2387 2416 ENDDO
…
2401 2430 height = zu(k) - zu(nzb_s_inner(j,i))
2402 2431 fw = EXP( -cfw * height / glsf )
2403 tkefactor_n(k,i) = c_tkef * ( fw0 * fw + ( 1.0_wp - fw ) * &
2432 tkefactor_n(k,i) = c_tkef * ( fw0 * fw + ( 1.0_wp - fw ) * &
2404 2433 ( glsf / glsc )**p23 )
2405 2434 ENDDO
…
2417 2446 height = zu(k) - zu(nzb_s_inner(j,i))
2418 2447 fw = EXP( -cfw * height / glsf )
2419 tkefactor_t(j,i) = c_tkef * ( fw0 * fw + ( 1.0_wp - fw ) * &
2448 tkefactor_t(j,i) = c_tkef * ( fw0 * fw + ( 1.0_wp - fw ) * &
2420 2449 ( glsf / glsc )**p23 )
2421 2450 ENDDO
…
2425 2454
2426 2455 #endif
2427 END SUBROUTINE pmci_setup_client
2456 END SUBROUTINE pmci_setup_child
2428 2457
2429 2458
…
2456 2485
2457 2486
2458 SUBROUTINE pmci_set_array_pointer( name, client_id, nz_cl )
2487 SUBROUTINE pmci_set_array_pointer( name, child_id, nz_cl )
2459 2488
2460 2489 IMPLICIT NONE
2461 2490
2462 INTEGER, INTENT(IN) :: client_id !:
2491 INTEGER, INTENT(IN) :: child_id !:
2463 2492 INTEGER, INTENT(IN) :: nz_cl !:
2464 2493 CHARACTER(LEN=*), INTENT(IN) :: name !:
…
2493 2522 #if defined( __nopointer )
2494 2523 IF ( ASSOCIATED( p_3d ) ) THEN
2495 CALL pmc_s_set_dataarray( client_id, p_3d, nz_cl, nz )
2524 CALL pmc_s_set_dataarray( child_id, p_3d, nz_cl, nz )
2496 2525 ELSEIF ( ASSOCIATED( p_2d ) ) THEN
2497 CALL pmc_s_set_dataarray( client_id, p_2d )
2526 CALL pmc_s_set_dataarray( child_id, p_2d )
2498 2527 ELSE
2499 2528 !
…
2501 2530 IF ( myid == 0 .AND. cpl_id == 1 ) THEN
2502 2531
2503 message_string = 'pointer for array "' // TRIM( name ) // &
2532 message_string = 'pointer for array "' // TRIM( name ) // &
2504 2533 '" can''t be associated'
2505 2534 CALL message( 'pmci_set_array_pointer', 'PA0117', 3, 2, 0, 6, 0 )
…
2519 2548
2520 2549 IF ( ASSOCIATED( p_3d ) ) THEN
2521 CALL pmc_s_set_dataarray( client_id, p_3d, nz_cl, nz, &
2550 CALL pmc_s_set_dataarray( child_id, p_3d, nz_cl, nz, &
2522 2551 array_2 = p_3d_sec )
2523 2552 ELSEIF ( ASSOCIATED( p_2d ) ) THEN
2524 CALL pmc_s_set_dataarray( client_id, p_2d )
2553 CALL pmc_s_set_dataarray( child_id, p_2d )
2525 2554 ELSE
2526 2555 !
…
2528 2557 IF ( myid == 0 .AND. cpl_id == 1 ) THEN
2529 2558
2530 message_string = 'pointer for array "' // TRIM( name ) // &
2559 message_string = 'pointer for array "' // TRIM( name ) // &
2531 2560 '" can''t be associated'
2532 2561 CALL message( 'pmci_set_array_pointer', 'PA0117', 3, 2, 0, 6, 0 )
…
2545 2574
2546 2575
2547 SUBROUTINE pmci_create_client_arrays( name, is, ie, js, je, nzc )
2576 SUBROUTINE pmci_create_child_arrays( name, is, ie, js, je, nzc )
2548 2577
2549 2578 IMPLICIT NONE
…
2599 2628 ELSE
2600 2629 !
2601 !-- Give only one message for the first client domain
2630 !-- Give only one message for the first child domain
2602 2631 IF ( myid == 0 .AND. cpl_id == 2 ) THEN
2603 2632
2604 message_string = 'pointer for array "' // TRIM( name ) // &
2633 message_string = 'pointer for array "' // TRIM( name ) // &
2605 2634 '" can''t be associated'
2606 CALL message( 'pmci_create_client_arrays', 'PA0170', 3, 2, 0, 6, 0 )
2635 CALL message( 'pmci_create_child_arrays', 'PA0170', 3, 2, 0, 6, 0 )
2607 2636 ELSE
2608 2637 !
…
2613 2642
2614 2643 #endif
2615 END SUBROUTINE pmci_create_client_arrays
2616
2617
2618
2619 SUBROUTINE pmci_server_initialize
2620 !-- TO_DO: add general explanations about what this subroutine does
2644 END SUBROUTINE pmci_create_child_arrays
2645
2646
2647
2648 SUBROUTINE pmci_parent_initialize
2649
2650 !
2651 !-- Send data for the children in order to let them create initial
2652 !-- conditions by interpolating the parent-domain fields.
2621 2653 #if defined( __parallel )
2622 2654 IMPLICIT NONE
2623 2655
2624 INTEGER(iwp) :: client_id !:
2656 INTEGER(iwp) :: child_id !:
2625 2657 INTEGER(iwp) :: m !:
2626 2658
2627 REAL(wp) :: waittime !:
2628
2629
2630 DO m = 1, SIZE( pmc_server_for_client ) - 1
2631 client_id = pmc_server_for_client(m)
2632 CALL pmc_s_fillbuffer( client_id, waittime=waittime )
2659 REAL(wp) :: waittime !:
2660
2661
2662 DO m = 1, SIZE( pmc_parent_for_child ) - 1
2663 child_id = pmc_parent_for_child(m)
2664 CALL pmc_s_fillbuffer( child_id, waittime=waittime )
2633 2665 ENDDO
2634 2666
2635 2667 #endif
2636 END SUBROUTINE pmci_server_initialize
2637
2638
2639
2640 SUBROUTINE pmci_client_initialize
2641 !-- TO_DO: add general explanations about what this subroutine does
2668 END SUBROUTINE pmci_parent_initialize
2669
2670
2671
2672 SUBROUTINE pmci_child_initialize
2673
2674 !
2675 !-- Create initial conditions for the current child domain by interpolating
2676 !-- the parent-domain fields.
2642 2677 #if defined( __parallel )
2643 2678 IMPLICIT NONE
…
2650 2685 INTEGER(iwp) :: jcs !:
2651 2686
2652 REAL(wp) :: waittime !:
2653
2654 !
2655 !-- Root id is never a client
2687 REAL(wp) :: waittime !:
2688
2689 !
2690 !-- Root id is never a child
2656 2691 IF ( cpl_id > 1 ) THEN
2657 2692
2658 2693 !
2659 !-- Client domain boundaries in the server index space
2694 !-- Child domain boundaries in the parent index space
2660 2695 icl = coarse_bound(1)
2661 2696 icr = coarse_bound(2)
…
2664 2699
2665 2700 !
2666 !-- Get data from server
2701 !-- Get data from the parent
2667 2702 CALL pmc_c_getbuffer( waittime = waittime )
2668 2703
2669 2704 !
2670 2705 !-- The interpolation.
2671 CALL pmci_interp_tril_all ( u, uc, icu, jco, kco, r1xu, r2xu, r1yo, &
2706 CALL pmci_interp_tril_all ( u, uc, icu, jco, kco, r1xu, r2xu, r1yo, &
2672 2707 r2yo, r1zo, r2zo, nzb_u_inner, 'u' )
2673 CALL pmci_interp_tril_all ( v, vc, ico, jcv, kco, r1xo, r2xo, r1yv, &
2708 CALL pmci_interp_tril_all ( v, vc, ico, jcv, kco, r1xo, r2xo, r1yv, &
2674 2709 r2yv, r1zo, r2zo, nzb_v_inner, 'v' )
2675 CALL pmci_interp_tril_all ( w, wc, ico, jco, kcw, r1xo, r2xo, r1yo, &
2710 CALL pmci_interp_tril_all ( w, wc, ico, jco, kcw, r1xo, r2xo, r1yo, &
2676 2711 r2yo, r1zw, r2zw, nzb_w_inner, 'w' )
2677 CALL pmci_interp_tril_all ( e, ec, ico, jco, kco, r1xo, r2xo, r1yo, &
2712 CALL pmci_interp_tril_all ( e, ec, ico, jco, kco, r1xo, r2xo, r1yo, &
2678 2713 r2yo, r1zo, r2zo, nzb_s_inner, 'e' )
2679 2714 IF ( .NOT. neutral ) THEN
2680 CALL pmci_interp_tril_all ( pt, ptc, ico, jco, kco, r1xo, r2xo, &
2715 CALL pmci_interp_tril_all ( pt, ptc, ico, jco, kco, r1xo, r2xo, &
2681 2716 r1yo, r2yo, r1zo, r2zo, nzb_s_inner, 's' )
2682 2717 ENDIF
2683 2718 IF ( humidity .OR. passive_scalar ) THEN
2684 CALL pmci_interp_tril_all ( q, qc, ico, jco, kco, r1xo, r2xo, r1yo, &
2719 CALL pmci_interp_tril_all ( q, qc, ico, jco, kco, r1xo, r2xo, r1yo, &
2685 2720 r2yo, r1zo, r2zo, nzb_s_inner, 's' )
2686 2721 ENDIF
…
2710 2745
2711 2746
2712 SUBROUTINE pmci_interp_tril_all( f, fc, ic, jc, kc, r1x, r2x, r1y, r2y, &
2747 SUBROUTINE pmci_interp_tril_all( f, fc, ic, jc, kc, r1x, r2x, r1y, r2y, &
2713 2748 r1z, r2z, kb, var )
2714 2749 !
2715 !-- Interpolation of the internal values for the client-domain initialization
2750 !-- Interpolation of the internal values for the child-domain initialization
2716 2751 !-- This subroutine is based on trilinear interpolation.
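!-- As a minimal standalone illustration of the scheme (not part of the
!-- module; the weight names mirror r1x/r2x etc., all numbers are invented):

      PROGRAM trilinear_sketch
         IMPLICIT NONE
         INTEGER, PARAMETER ::  wp = KIND( 1.0D0 )
         REAL(wp) ::  fc(0:1,0:1,0:1)   ! coarse values at the 8 cell corners
         REAL(wp) ::  r1x, r2x, r1y, r2y, r1z, r2z, f

         fc  = RESHAPE( (/ 1.0_wp, 2.0_wp, 3.0_wp, 4.0_wp,                   &
                           5.0_wp, 6.0_wp, 7.0_wp, 8.0_wp /), (/ 2, 2, 2 /) )
         r2x = 0.25_wp;  r1x = 1.0_wp - r2x   ! fractional position in x
         r2y = 0.50_wp;  r1y = 1.0_wp - r2y   ! ... in y
         r2z = 0.10_wp;  r1z = 1.0_wp - r2z   ! ... in z
!
!--      Weighted sum over the eight corners, (k,j,i) ordering as in PALM
         f = r1z * ( r1y * ( r1x * fc(0,0,0) + r2x * fc(0,0,1) )             &
                   + r2y * ( r1x * fc(0,1,0) + r2x * fc(0,1,1) ) )           &
           + r2z * ( r1y * ( r1x * fc(1,0,0) + r2x * fc(1,0,1) )             &
                   + r2y * ( r1x * fc(1,1,0) + r2x * fc(1,1,1) ) )
         PRINT *, 'interpolated value:', f
      END PROGRAM trilinear_sketch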
2717 2752 !-- Coding based on interp_tril_lr/sn/t
…
2739 2774
2740 2775 REAL(wp), DIMENSION(nzb:nzt+1,nysg:nyng,nxlg:nxrg), INTENT(INOUT) :: f !:
2741 REAL(wp), DIMENSION(0:cg%nz+1,jcs:jcn,icl:icr), INTENT(IN) :: fc !:
2776 REAL(wp), DIMENSION(0:cg%nz+1,jcs:jcn,icl:icr), INTENT(IN) :: fc !:
2742 2777 REAL(wp), DIMENSION(nxlg:nxrg), INTENT(IN) :: r1x !:
2743 2778 REAL(wp), DIMENSION(nxlg:nxrg), INTENT(IN) :: r2x !:
…
2762 2797 jb = nys
2763 2798 je = nyn
2764 IF ( nest_bound_l ) THEN
2765 ib = nxl - 1
2766 !
2767 !-- For u, nxl is a ghost node, but not for the other variables
2768 IF ( var == 'u' ) THEN
2769 ib = nxl
2799 IF ( nesting_mode /= 'vertical' ) THEN
2800 IF ( nest_bound_l ) THEN
2801 ib = nxl - 1
2802 !
2803 !-- For u, nxl is a ghost node, but not for the other variables
2804 IF ( var == 'u' ) THEN
2805 ib = nxl
2806 ENDIF
2770 2807 ENDIF
2771 ENDIF
2772 IF ( nest_bound_s ) THEN
2773 jb = nys - 1
2774 !
2775 !-- For v, nys is a ghost node, but not for the other variables
2776 IF ( var == 'v' ) THEN
2777 jb = nys
2808 IF ( nest_bound_s ) THEN
2809 jb = nys - 1
2810 !
2811 !-- For v, nys is a ghost node, but not for the other variables
2812 IF ( var == 'v' ) THEN
2813 jb = nys
2814 ENDIF
2778 2815 ENDIF
2779 ENDIF
2780 IF ( nest_bound_r ) THEN
2781 ie = nxr + 1
2782 ENDIF
2783 IF ( nest_bound_n ) THEN
2784 je = nyn + 1
2785 ENDIF
2786
2816 IF ( nest_bound_r ) THEN
2817 ie = nxr + 1
2818 ENDIF
2819 IF ( nest_bound_n ) THEN
2820 je = nyn + 1
2821 ENDIF
2822 ENDIF
2787 2823 !
2788 2824 !-- Trilinear interpolation.
…
2828 2864 k = kb(j,i) + 1
2829 2865 DO WHILE ( zu(k) < zuc1 )
2830 logratio = ( LOG( ( zu(k) - zu(kb(j,i)) ) / z0(j,i)) ) / logzuc1
2866 logratio = ( LOG( ( zu(k) - zu(kb(j,i)) ) / z0(j,i)) ) / &
2867 logzuc1
2831 2868 f(k,j,i) = logratio * f(k1,j,i)
2832 2869 k = k + 1
…
2849 2886
2850 2887 #endif
2851 END SUBROUTINE pmci_client_initialize
2888 END SUBROUTINE pmci_child_initialize
2852 2889
2853 2890
…
2855 2892 SUBROUTINE pmci_check_setting_mismatches
2856 2893 !
2857 !-- Check for mismatches between settings of master and client variables
2894 !-- Check for mismatches between settings of master and child variables
2858 !-- (e.g., all clients have to follow the end_time settings of the root model).
2895 !-- (e.g., all children have to follow the end_time settings of the root model).
2859 2896 !-- The root model overwrites variables in the other models, so these variables
2860 2897 !-- only need to be set once in file PARIN.
…
2862 2899 #if defined( __parallel )
2863 2900
2864 USE control_parameters, &
2901 USE control_parameters, &
2865 2902 ONLY: dt_restart, end_time, message_string, restart_time, time_restart
2866 2903
…
2884 2921 IF ( .NOT. pmc_is_rootmodel() ) THEN
2885 2922 IF ( end_time /= end_time_root ) THEN
2886 WRITE( message_string, * ) 'mismatch between root model and ', &
2887 'client settings & end_time(root) = ', end_time_root, &
2888 ' & end_time(client) = ', end_time, ' & client value is set', &
2923 WRITE( message_string, * ) 'mismatch between root model and ', &
2924 'child settings & end_time(root) = ', end_time_root, &
2925 ' & end_time(child) = ', end_time, ' & child value is set', &
2889 2926 ' to root value'
2890 CALL message( 'pmci_check_setting_mismatches', 'PA0419', 0, 1, 0, 6, &
2927 CALL message( 'pmci_check_setting_mismatches', 'PA0419', 0, 1, 0, 6, &
2891 2928 0 )
2892 2929 end_time = end_time_root
…
2901 2938 IF ( .NOT. pmc_is_rootmodel() ) THEN
2902 2939 IF ( restart_time /= restart_time_root ) THEN
2903 WRITE( message_string, * ) 'mismatch between root model and ', &
2904 'client settings & restart_time(root) = ', restart_time_root, &
2905 ' & restart_time(client) = ', restart_time, ' & client ', &
2940 WRITE( message_string, * ) 'mismatch between root model and ', &
2941 'child settings & restart_time(root) = ', restart_time_root, &
2942 ' & restart_time(child) = ', restart_time, ' & child ', &
2906 2943 'value is set to root value'
2907 CALL message( 'pmci_check_setting_mismatches', 'PA0419', 0, 1, 0, 6, &
2944 CALL message( 'pmci_check_setting_mismatches', 'PA0419', 0, 1, 0, 6, &
2908 2945 0 )
2909 2946 restart_time = restart_time_root
…
2918 2955 IF ( .NOT. pmc_is_rootmodel() ) THEN
2919 2956 IF ( dt_restart /= dt_restart_root ) THEN
2920 WRITE( message_string, * ) 'mismatch between root model and ', &
2921 'client settings & dt_restart(root) = ', dt_restart_root, &
2922 ' & dt_restart(client) = ', dt_restart, ' & client ', &
2957 WRITE( message_string, * ) 'mismatch between root model and ', &
2958 'child settings & dt_restart(root) = ', dt_restart_root, &
2959 ' & dt_restart(child) = ', dt_restart, ' & child ', &
2923 2960 'value is set to root value'
2924 CALL message( 'pmci_check_setting_mismatches', 'PA0419', 0, 1, 0, 6, &
2961 CALL message( 'pmci_check_setting_mismatches', 'PA0419', 0, 1, 0, 6, &
2925 2962 0 )
2926 2963 dt_restart = dt_restart_root
…
2935 2972 IF ( .NOT. pmc_is_rootmodel() ) THEN
2936 2973 IF ( time_restart /= time_restart_root ) THEN
2937 WRITE( message_string, * ) 'mismatch between root model and ', &
2938 'client settings & time_restart(root) = ', time_restart_root, &
2939 ' & time_restart(client) = ', time_restart, ' & client ', &
2974 WRITE( message_string, * ) 'mismatch between root model and ', &
2975 'child settings & time_restart(root) = ', time_restart_root, &
2976 ' & time_restart(child) = ', time_restart, ' & child ', &
2940 2977 'value is set to root value'
2941 CALL message( 'pmci_check_setting_mismatches', 'PA0419', 0, 1, 0, 6, &
2978 CALL message( 'pmci_check_setting_mismatches', 'PA0419', 0, 1, 0, 6, &
2942 2979 0 )
2943 2980 time_restart = time_restart_root
…
2953 2990 SUBROUTINE pmci_ensure_nest_mass_conservation
2954 2991
2955 #if defined( __parallel )
2956 2992 !
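!-- The essence of the correction implemented below, as a standalone sketch
!-- (hypothetical numbers; the names are chosen here for illustration and
!-- are not those of the subroutine):

      PROGRAM nest_mass_balance_sketch
         IMPLICIT NONE
         INTEGER, PARAMETER ::  wp = KIND( 1.0D0 )
         REAL(wp) ::  vf_lr, vf_sn, vf_top, area_top, w_shift

         vf_lr    =  12.0_wp    ! net volume flow through left/right boundaries
         vf_sn    =  -4.0_wp    ! net volume flow through south/north boundaries
         vf_top   =  -5.0_wp    ! current volume flow through the top
         area_top = 1.0E3_wp    ! horizontal area of the top boundary
!
!--      A uniform shift of w at the top closes the volume budget to zero
         w_shift = ( vf_lr + vf_sn + vf_top ) / area_top
         PRINT *, 'shift applied to w at the top boundary:', w_shift
      END PROGRAM nest_mass_balance_sketch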
2957 2993 !-- Adjust the volume-flow rate through the top boundary so that the net volume
…
2959 2995 IMPLICIT NONE
2960 2996
2961 INTEGER(iwp) :: i !:
2962 INTEGER(iwp) :: ierr !:
2963 INTEGER(iwp) :: j !:
2964 INTEGER(iwp) :: k !:
2997 INTEGER(iwp) :: i !:
2998 INTEGER(iwp) :: ierr !:
2999 INTEGER(iwp) :: j !:
3000 INTEGER(iwp) :: k !:
2965 3001
2966 3002 REAL(wp) :: dxdy !:
…
2996 3032 #if defined( __parallel )
2997 3033 IF ( collective_wait ) CALL MPI_BARRIER( comm2d, ierr )
2998 CALL MPI_ALLREDUCE( volume_flow_l(1), volume_flow(1), 1, MPI_REAL, &
3034 CALL MPI_ALLREDUCE( volume_flow_l(1), volume_flow(1), 1, MPI_REAL, &
2999 3035 MPI_SUM, comm2d, ierr )
3000 3036 #else
…
3029 3065 #if defined( __parallel )
3030 3066 IF ( collective_wait ) CALL MPI_BARRIER( comm2d, ierr )
3031 CALL MPI_ALLREDUCE( volume_flow_l(2), volume_flow(2), 1, MPI_REAL, &
3067 CALL MPI_ALLREDUCE( volume_flow_l(2), volume_flow(2), 1, MPI_REAL, &
3032 3068 MPI_SUM, comm2d, ierr )
3033 3069 #else
…
3049 3085 #if defined( __parallel )
3050 3086 IF ( collective_wait ) CALL MPI_BARRIER( comm2d, ierr )
3051 CALL MPI_ALLREDUCE( volume_flow_l(3), volume_flow(3), 1, MPI_REAL, &
3087 CALL MPI_ALLREDUCE( volume_flow_l(3), volume_flow(3), 1, MPI_REAL, &
3052 3088 MPI_SUM, comm2d, ierr )
3053 3089 #else
…
3066 3102 ENDDO
3067 3103
3068 #endif
3069 3104 END SUBROUTINE pmci_ensure_nest_mass_conservation
…
3101 3136 IMPLICIT NONE
3102 3137
3103 INTEGER(iwp), INTENT(IN) :: swaplevel !: swaplevel (1 or 2) of PALM's
3104 !: timestep
3105
3106 INTEGER(iwp) :: client_id !:
3107 INTEGER(iwp) :: m !:
3108
3109 DO m = 1, SIZE( pmc_server_for_client )-1
3110 client_id = pmc_server_for_client(m)
3111 CALL pmc_s_set_active_data_array( client_id, swaplevel )
3138 INTEGER(iwp), INTENT(IN) :: swaplevel !: swaplevel (1 or 2) of PALM's
3139 !: timestep
3140
3141 INTEGER(iwp) :: child_id !:
3142 INTEGER(iwp) :: m !:
3143
3144 DO m = 1, SIZE( pmc_parent_for_child )-1
3145 child_id = pmc_parent_for_child(m)
3146 CALL pmc_s_set_active_data_array( child_id, swaplevel )
3112 3147 ENDDO
…
3131 3166 INTEGER(iwp) :: istat !:
3132 3167
3133 CHARACTER(LEN=*), INTENT(IN) :: local_nesting_mode
3134
3135 IF ( local_nesting_mode == 'one-way' ) THEN
3136
3137 CALL pmci_client_datatrans( server_to_client )
3138 CALL pmci_server_datatrans( server_to_client )
3168 CHARACTER(LEN=*), INTENT(IN) :: local_nesting_mode
3169
3170 IF ( TRIM( local_nesting_mode ) == 'one-way' ) THEN
3171
3172 CALL pmci_child_datatrans( parent_to_child )
3173 CALL pmci_parent_datatrans( parent_to_child )
3139 3174
3140 3175 ELSE
3141 3176
3142 IF ( nesting_datatransfer_mode == 'cascade' ) THEN
3143
3144 CALL pmci_client_datatrans( server_to_client )
3145 CALL pmci_server_datatrans( server_to_client )
3146
3147 CALL pmci_server_datatrans( client_to_server )
3148 CALL pmci_client_datatrans( client_to_server )
3149
3150 ELSEIF ( nesting_datatransfer_mode == 'overlap') THEN
3151
3152 CALL pmci_server_datatrans( server_to_client )
3153 CALL pmci_client_datatrans( server_to_client )
3154
3155 CALL pmci_client_datatrans( client_to_server )
3156 CALL pmci_server_datatrans( client_to_server )
3157
3158 ELSEIF ( nesting_datatransfer_mode == 'mixed' ) THEN
3159
3160 CALL pmci_server_datatrans( server_to_client )
3161 CALL pmci_client_datatrans( server_to_client )
3162
3163 CALL pmci_server_datatrans( client_to_server )
3164 CALL pmci_client_datatrans( client_to_server )
3177 IF( nesting_datatransfer_mode == 'cascade' ) THEN
3178
3179 CALL pmci_child_datatrans( parent_to_child )
3180 CALL pmci_parent_datatrans( parent_to_child )
3181
3182 CALL pmci_parent_datatrans( child_to_parent )
3183 CALL pmci_child_datatrans( child_to_parent )
3184
3185 ELSEIF( nesting_datatransfer_mode == 'overlap') THEN
3186
3187 CALL pmci_parent_datatrans( parent_to_child )
3188 CALL pmci_child_datatrans( parent_to_child )
3189
3190 CALL pmci_child_datatrans( child_to_parent )
3191 CALL pmci_parent_datatrans( child_to_parent )
3192
3193 ELSEIF( TRIM( nesting_datatransfer_mode ) == 'mixed' ) THEN
3194
3195 CALL pmci_parent_datatrans( parent_to_child )
3196 CALL pmci_child_datatrans( parent_to_child )
3197
3198 CALL pmci_parent_datatrans( child_to_parent )
3199 CALL pmci_child_datatrans( child_to_parent )
3165 3200
3166 3201 ENDIF
…
3173 3208
3174 3209
3175 SUBROUTINE pmci_server_datatrans( direction )
3210 SUBROUTINE pmci_parent_datatrans( direction )
3176 3211
3177 3212 IMPLICIT NONE
3178 3213
3179 INTEGER(iwp), INTENT(IN) :: direction !:
3214 INTEGER(iwp), INTENT(IN) :: direction !:
3180 3215
3181 3216 #if defined( __parallel )
3182 INTEGER(iwp) :: client_id !:
3217 INTEGER(iwp) :: child_id !:
3183 3218 INTEGER(iwp) :: i !:
3184 3219 INTEGER(iwp) :: j !:
…
3191 3226
3192 3227
3193 DO m = 1, SIZE( PMC_Server_for_Client )-1
3194 client_id = PMC_Server_for_Client(m)
3228 DO m = 1, SIZE( pmc_parent_for_child ) - 1
3229 child_id = pmc_parent_for_child(m)
3195 3230
3196 IF ( direction == server_to_client ) THEN
3197 CALL cpu_log( log_point_s(71), 'pmc server send', 'start' )
3198 CALL pmc_s_fillbuffer( client_id )
3199 CALL cpu_log( log_point_s(71), 'pmc server send', 'stop' )
3231 IF ( direction == parent_to_child ) THEN
3232 CALL cpu_log( log_point_s(71), 'pmc parent send', 'start' )
3233 CALL pmc_s_fillbuffer( child_id )
3234 CALL cpu_log( log_point_s(71), 'pmc parent send', 'stop' )
3200 3235 ELSE
3201 3236 !
3202 !-- Communication from client to server
3203 CALL cpu_log( log_point_s(72), 'pmc server recv', 'start' )
3204 client_id = pmc_server_for_client(m)
3205 CALL pmc_s_getdata_from_buffer( client_id )
3206 CALL cpu_log( log_point_s(72), 'pmc server recv', 'stop' )
3237 !-- Communication from child to parent
3238 CALL cpu_log( log_point_s(72), 'pmc parent recv', 'start' )
3239 child_id = pmc_parent_for_child(m)
3240 CALL pmc_s_getdata_from_buffer( child_id )
3241 CALL cpu_log( log_point_s(72), 'pmc parent recv', 'stop' )
3207 3242
3208 3243 !
…
3234 3269
3235 3270 #endif
3236 END SUBROUTINE pmci_server_datatrans
3237
3238
3239
3240 SUBROUTINE pmci_client_datatrans( direction )
3271 END SUBROUTINE pmci_parent_datatrans
3272
3273
3274
3275 SUBROUTINE pmci_child_datatrans( direction )
3241 3276
3242 3277 IMPLICIT NONE
…
3258 3293 IF ( cpl_id > 1 ) THEN
3259 3294 !
3260 !-- Client domain boundaries in the server indice space.
3295 !-- Child domain boundaries in the parent indice space.
3261 3296 icl = coarse_bound(1)
3262 3297 icr = coarse_bound(2)
3263 3298 jcs = coarse_bound(3)
3264 3299 jcn = coarse_bound(4)
3265 3300
3266 IF ( direction == server_to_client ) THEN
3267
3268 CALL cpu_log( log_point_s(73), 'pmc client recv', 'start' )
3301 IF ( direction == parent_to_child ) THEN
3302
3303 CALL cpu_log( log_point_s(73), 'pmc child recv', 'start' )
3269 3304 CALL pmc_c_getbuffer( )
3270 CALL cpu_log( log_point_s(73), 'pmc client recv', 'stop' )
3305 CALL cpu_log( log_point_s(73), 'pmc child recv', 'stop' )
3271 3306
3272 3307 CALL cpu_log( log_point_s(75), 'pmc interpolation', 'start' )
…
3276 3311 ELSE
3277 3312 !
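!-- (Reading aid for the two directions, added as a summary.) A
!-- parent_to_child transfer makes the child call pmc_c_getbuffer and
!-- interpolate the received parent fields onto its boundaries, while a
!-- child_to_parent transfer (below) makes the child anterpolate its fields
!-- and return them to the parent with pmc_c_putbuffer.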
3278 !-- direction == c lient_to_server3313 !-- direction == child_to_parent 3279 3314 CALL cpu_log( log_point_s(76), 'pmc anterpolation', 'start' ) 3280 3315 CALL pmci_anterpolation 3281 3316 CALL cpu_log( log_point_s(76), 'pmc anterpolation', 'stop' ) 3282 3317 3283 CALL cpu_log( log_point_s(74), 'pmc c lientsend', 'start' )3318 CALL cpu_log( log_point_s(74), 'pmc child send', 'start' ) 3284 3319 CALL pmc_c_putbuffer( ) 3285 CALL cpu_log( log_point_s(74), 'pmc c lientsend', 'stop' )3320 CALL cpu_log( log_point_s(74), 'pmc child send', 'stop' ) 3286 3321 3287 3322 ENDIF … … 3297 3332 3298 3333 ! 3299 !-- Add IF-condition here: IF not vertical nesting 3334 !-- In case of vertical nesting no interpolation is needed for the 3335 !-- horizontal boundaries 3336 IF ( nesting_mode /= 'vertical' ) THEN 3337 3338 ! 3300 3339 !-- Left border pe: 3301 IF ( nest_bound_l ) THEN 3302 CALL pmci_interp_tril_lr( u, uc, icu, jco, kco, r1xu, r2xu, r1yo, & 3303 r2yo, r1zo, r2zo, nzb_u_inner, logc_u_l, & 3304 logc_ratio_u_l, nzt_topo_nestbc_l, 'l', & 3305 'u' ) 3306 CALL pmci_interp_tril_lr( v, vc, ico, jcv, kco, r1xo, r2xo, r1yv, & 3307 r2yv, r1zo, r2zo, nzb_v_inner, logc_v_l, & 3308 logc_ratio_v_l, nzt_topo_nestbc_l, 'l', & 3309 'v' ) 3310 CALL pmci_interp_tril_lr( w, wc, ico, jco, kcw, r1xo, r2xo, r1yo, & 3311 r2yo, r1zw, r2zw, nzb_w_inner, logc_w_l, & 3312 logc_ratio_w_l, nzt_topo_nestbc_l, 'l', & 3313 'w' ) 3314 CALL pmci_interp_tril_lr( e, ec, ico, jco, kco, r1xo, r2xo, r1yo, & 3315 r2yo, r1zo, r2zo, nzb_s_inner, logc_u_l, & 3316 logc_ratio_u_l, nzt_topo_nestbc_l, 'l', & 3317 'e' ) 3318 IF ( .NOT. neutral ) THEN 3319 CALL pmci_interp_tril_lr( pt, ptc, ico, jco, kco, r1xo, r2xo, & 3320 r1yo, r2yo, r1zo, r2zo, nzb_s_inner, & 3321 logc_u_l, logc_ratio_u_l, & 3322 nzt_topo_nestbc_l, 'l', 's' ) 3323 ENDIF 3324 IF ( humidity .OR. passive_scalar ) THEN 3325 CALL pmci_interp_tril_lr( q, qc, ico, jco, kco, r1xo, r2xo, r1yo, & 3326 r2yo, r1zo, r2zo, nzb_s_inner, & 3327 logc_u_l, logc_ratio_u_l, & 3328 nzt_topo_nestbc_l, 'l', 's' ) 3329 ENDIF 3330 3331 IF ( nesting_mode == 'one-way' ) THEN 3332 CALL pmci_extrap_ifoutflow_lr( u, nzb_u_inner, 'l', 'u' ) 3333 CALL pmci_extrap_ifoutflow_lr( v, nzb_v_inner, 'l', 'v' ) 3334 CALL pmci_extrap_ifoutflow_lr( w, nzb_w_inner, 'l', 'w' ) 3335 CALL pmci_extrap_ifoutflow_lr( e, nzb_s_inner, 'l', 'e' ) 3340 IF ( nest_bound_l ) THEN 3341 CALL pmci_interp_tril_lr( u, uc, icu, jco, kco, r1xu, r2xu, & 3342 r1yo, r2yo, r1zo, r2zo, nzb_u_inner, & 3343 logc_u_l, logc_ratio_u_l, & 3344 nzt_topo_nestbc_l, 'l', 'u' ) 3345 CALL pmci_interp_tril_lr( v, vc, ico, jcv, kco, r1xo, r2xo, & 3346 r1yv, r2yv, r1zo, r2zo, nzb_v_inner, & 3347 logc_v_l, logc_ratio_v_l, & 3348 nzt_topo_nestbc_l, 'l', 'v' ) 3349 CALL pmci_interp_tril_lr( w, wc, ico, jco, kcw, r1xo, r2xo, & 3350 r1yo, r2yo, r1zw, r2zw, nzb_w_inner, & 3351 logc_w_l, logc_ratio_w_l, & 3352 nzt_topo_nestbc_l, 'l', 'w' ) 3353 CALL pmci_interp_tril_lr( e, ec, ico, jco, kco, r1xo, r2xo, & 3354 r1yo, r2yo, r1zo, r2zo, nzb_s_inner, & 3355 logc_u_l, logc_ratio_u_l, & 3356 nzt_topo_nestbc_l, 'l', 'e' ) 3336 3357 IF ( .NOT. neutral ) THEN 3337 CALL pmci_extrap_ifoutflow_lr( pt,nzb_s_inner, 'l', 's' ) 3358 CALL pmci_interp_tril_lr( pt, ptc, ico, jco, kco, r1xo, r2xo, & 3359 r1yo, r2yo, r1zo, r2zo, nzb_s_inner, & 3360 logc_u_l, logc_ratio_u_l, & 3361 nzt_topo_nestbc_l, 'l', 's' ) 3338 3362 ENDIF 3339 3363 IF ( humidity .OR. 
passive_scalar ) THEN 3340 CALL pmci_extrap_ifoutflow_lr( q, nzb_s_inner, 'l', 's' ) 3364 CALL pmci_interp_tril_lr( q, qc, ico, jco, kco, r1xo, r2xo, & 3365 r1yo, r2yo, r1zo, r2zo, nzb_s_inner, & 3366 logc_u_l, logc_ratio_u_l, & 3367 nzt_topo_nestbc_l, 'l', 's' ) 3341 3368 ENDIF 3369 3370 IF ( TRIM( nesting_mode ) == 'one-way' ) THEN 3371 CALL pmci_extrap_ifoutflow_lr( u, nzb_u_inner, 'l', 'u' ) 3372 CALL pmci_extrap_ifoutflow_lr( v, nzb_v_inner, 'l', 'v' ) 3373 CALL pmci_extrap_ifoutflow_lr( w, nzb_w_inner, 'l', 'w' ) 3374 CALL pmci_extrap_ifoutflow_lr( e, nzb_s_inner, 'l', 'e' ) 3375 IF ( .NOT. neutral ) THEN 3376 CALL pmci_extrap_ifoutflow_lr( pt,nzb_s_inner, 'l', 's' ) 3377 ENDIF 3378 IF ( humidity .OR. passive_scalar ) THEN 3379 CALL pmci_extrap_ifoutflow_lr( q, nzb_s_inner, 'l', 's' ) 3380 ENDIF 3381 ENDIF 3382 3342 3383 ENDIF 3343 3384 3344 ENDIF 3345 ! 3346 !-- Right border pe 3347 IF ( nest_bound_r ) THEN 3348 CALL pmci_interp_tril_lr( u, uc, icu, jco, kco, r1xu, r2xu, r1yo, & 3349 r2yo, r1zo, r2zo, nzb_u_inner, logc_u_r, & 3350 logc_ratio_u_r, nzt_topo_nestbc_r, 'r', & 3351 'u' ) 3352 CALL pmci_interp_tril_lr( v, vc, ico, jcv, kco, r1xo, r2xo, r1yv, & 3353 r2yv, r1zo, r2zo, nzb_v_inner, logc_v_r, & 3354 logc_ratio_v_r, nzt_topo_nestbc_r, 'r', & 3355 'v' ) 3356 CALL pmci_interp_tril_lr( w, wc, ico, jco, kcw, r1xo, r2xo, r1yo, & 3357 r2yo, r1zw, r2zw, nzb_w_inner, logc_w_r, & 3358 logc_ratio_w_r, nzt_topo_nestbc_r, 'r', & 3359 'w' ) 3360 CALL pmci_interp_tril_lr( e, ec, ico, jco, kco, r1xo, r2xo, r1yo, & 3361 r2yo, r1zo, r2zo, nzb_s_inner, logc_u_r, & 3362 logc_ratio_u_r, nzt_topo_nestbc_r, 'r', & 3363 'e' ) 3364 IF ( .NOT. neutral ) THEN 3365 CALL pmci_interp_tril_lr( pt, ptc, ico, jco, kco, r1xo, r2xo, & 3366 r1yo, r2yo, r1zo, r2zo, nzb_s_inner, & 3367 logc_u_r, logc_ratio_u_r, & 3368 nzt_topo_nestbc_r, 'r', 's' ) 3369 ENDIF 3370 IF ( humidity .OR. passive_scalar ) THEN 3371 CALL pmci_interp_tril_lr( q, qc, ico, jco, kco, r1xo, r2xo, r1yo, & 3372 r2yo, r1zo, r2zo, nzb_s_inner, & 3373 logc_u_r, logc_ratio_u_r, & 3374 nzt_topo_nestbc_r, 'r', 's' ) 3375 ENDIF 3376 3377 IF ( nesting_mode == 'one-way' ) THEN 3378 CALL pmci_extrap_ifoutflow_lr( u, nzb_u_inner, 'r', 'u' ) 3379 CALL pmci_extrap_ifoutflow_lr( v, nzb_v_inner, 'r', 'v' ) 3380 CALL pmci_extrap_ifoutflow_lr( w, nzb_w_inner, 'r', 'w' ) 3381 CALL pmci_extrap_ifoutflow_lr( e, nzb_s_inner, 'r', 'e' ) 3385 ! 3386 !-- Right border pe 3387 IF ( nest_bound_r ) THEN 3388 CALL pmci_interp_tril_lr( u, uc, icu, jco, kco, r1xu, r2xu, & 3389 r1yo, r2yo, r1zo, r2zo, nzb_u_inner, & 3390 logc_u_r, logc_ratio_u_r, & 3391 nzt_topo_nestbc_r, 'r', 'u' ) 3392 CALL pmci_interp_tril_lr( v, vc, ico, jcv, kco, r1xo, r2xo, & 3393 r1yv, r2yv, r1zo, r2zo, nzb_v_inner, & 3394 logc_v_r, logc_ratio_v_r, & 3395 nzt_topo_nestbc_r, 'r', 'v' ) 3396 CALL pmci_interp_tril_lr( w, wc, ico, jco, kcw, r1xo, r2xo, & 3397 r1yo, r2yo, r1zw, r2zw, nzb_w_inner, & 3398 logc_w_r, logc_ratio_w_r, & 3399 nzt_topo_nestbc_r, 'r', 'w' ) 3400 CALL pmci_interp_tril_lr( e, ec, ico, jco, kco, r1xo, r2xo, & 3401 r1yo,r2yo, r1zo, r2zo, nzb_s_inner, & 3402 logc_u_r, logc_ratio_u_r, & 3403 nzt_topo_nestbc_r, 'r', 'e' ) 3382 3404 IF ( .NOT. neutral ) THEN 3383 CALL pmci_extrap_ifoutflow_lr( pt,nzb_s_inner, 'r', 's' ) 3405 CALL pmci_interp_tril_lr( pt, ptc, ico, jco, kco, r1xo, r2xo, & 3406 r1yo, r2yo, r1zo, r2zo, nzb_s_inner, & 3407 logc_u_r, logc_ratio_u_r, & 3408 nzt_topo_nestbc_r, 'r', 's' ) 3384 3409 ENDIF 3385 3410 IF ( humidity .OR. 
passive_scalar ) THEN 3386 CALL pmci_extrap_ifoutflow_lr( q, nzb_s_inner, 'r', 's' ) 3411 CALL pmci_interp_tril_lr( q, qc, ico, jco, kco, r1xo, r2xo, & 3412 r1yo, r2yo, r1zo, r2zo, nzb_s_inner, & 3413 logc_u_r, logc_ratio_u_r, & 3414 nzt_topo_nestbc_r, 'r', 's' ) 3387 3415 ENDIF 3416 3417 IF ( TRIM( nesting_mode ) == 'one-way' ) THEN 3418 CALL pmci_extrap_ifoutflow_lr( u, nzb_u_inner, 'r', 'u' ) 3419 CALL pmci_extrap_ifoutflow_lr( v, nzb_v_inner, 'r', 'v' ) 3420 CALL pmci_extrap_ifoutflow_lr( w, nzb_w_inner, 'r', 'w' ) 3421 CALL pmci_extrap_ifoutflow_lr( e, nzb_s_inner, 'r', 'e' ) 3422 IF ( .NOT. neutral ) THEN 3423 CALL pmci_extrap_ifoutflow_lr( pt,nzb_s_inner, 'r', 's' ) 3424 ENDIF 3425 IF ( humidity .OR. passive_scalar ) THEN 3426 CALL pmci_extrap_ifoutflow_lr( q, nzb_s_inner, 'r', 's' ) 3427 ENDIF 3428 ENDIF 3429 3388 3430 ENDIF 3389 3431 3390 ENDIF 3391 ! 3392 !-- South border pe 3393 IF ( nest_bound_s ) THEN 3394 CALL pmci_interp_tril_sn( u, uc, icu, jco, kco, r1xu, r2xu, r1yo, & 3395 r2yo, r1zo, r2zo, nzb_u_inner, logc_u_s, & 3396 logc_ratio_u_s, nzt_topo_nestbc_s, 's', & 3397 'u' ) 3398 CALL pmci_interp_tril_sn( v, vc, ico, jcv, kco, r1xo, r2xo, r1yv, & 3399 r2yv, r1zo, r2zo, nzb_v_inner, logc_v_s, & 3400 logc_ratio_v_s, nzt_topo_nestbc_s, 's', & 3401 'v' ) 3402 CALL pmci_interp_tril_sn( w, wc, ico, jco, kcw, r1xo, r2xo, r1yo, & 3403 r2yo, r1zw, r2zw, nzb_w_inner, logc_w_s, & 3404 logc_ratio_w_s, nzt_topo_nestbc_s, 's', & 3405 'w' ) 3406 CALL pmci_interp_tril_sn( e, ec, ico, jco, kco, r1xo, r2xo, r1yo, & 3407 r2yo, r1zo, r2zo, nzb_s_inner, logc_u_s, & 3408 logc_ratio_u_s, nzt_topo_nestbc_s, 's', & 3409 'e' ) 3410 IF ( .NOT. neutral ) THEN 3411 CALL pmci_interp_tril_sn( pt, ptc, ico, jco, kco, r1xo, r2xo, & 3412 r1yo, r2yo, r1zo, r2zo, nzb_s_inner, & 3413 logc_u_s, logc_ratio_u_s, & 3414 nzt_topo_nestbc_s, 's', 's' ) 3415 ENDIF 3416 IF ( humidity .OR. passive_scalar ) THEN 3417 CALL pmci_interp_tril_sn( q, qc, ico, jco, kco, r1xo, r2xo, r1yo, & 3418 r2yo, r1zo, r2zo, nzb_s_inner, & 3419 logc_u_s, logc_ratio_u_s, & 3420 nzt_topo_nestbc_s, 's', 's' ) 3421 ENDIF 3422 3423 IF ( nesting_mode == 'one-way' ) THEN 3424 CALL pmci_extrap_ifoutflow_sn( u, nzb_u_inner, 's', 'u' ) 3425 CALL pmci_extrap_ifoutflow_sn( v, nzb_v_inner, 's', 'v' ) 3426 CALL pmci_extrap_ifoutflow_sn( w, nzb_w_inner, 's', 'w' ) 3427 CALL pmci_extrap_ifoutflow_sn( e, nzb_s_inner, 's', 'e' ) 3432 ! 3433 !-- South border pe 3434 IF ( nest_bound_s ) THEN 3435 CALL pmci_interp_tril_sn( u, uc, icu, jco, kco, r1xu, r2xu, & 3436 r1yo, r2yo, r1zo, r2zo, nzb_u_inner, & 3437 logc_u_s, logc_ratio_u_s, & 3438 nzt_topo_nestbc_s, 's', 'u' ) 3439 CALL pmci_interp_tril_sn( v, vc, ico, jcv, kco, r1xo, r2xo, & 3440 r1yv, r2yv, r1zo, r2zo, nzb_v_inner, & 3441 logc_v_s, logc_ratio_v_s, & 3442 nzt_topo_nestbc_s, 's', 'v' ) 3443 CALL pmci_interp_tril_sn( w, wc, ico, jco, kcw, r1xo, r2xo, & 3444 r1yo, r2yo, r1zw, r2zw, nzb_w_inner, & 3445 logc_w_s, logc_ratio_w_s, & 3446 nzt_topo_nestbc_s, 's','w' ) 3447 CALL pmci_interp_tril_sn( e, ec, ico, jco, kco, r1xo, r2xo, & 3448 r1yo, r2yo, r1zo, r2zo, nzb_s_inner, & 3449 logc_u_s, logc_ratio_u_s, & 3450 nzt_topo_nestbc_s, 's', 'e' ) 3428 3451 IF ( .NOT. neutral ) THEN 3429 CALL pmci_extrap_ifoutflow_sn( pt,nzb_s_inner, 's', 's' ) 3452 CALL pmci_interp_tril_sn( pt, ptc, ico, jco, kco, r1xo, r2xo, & 3453 r1yo, r2yo, r1zo, r2zo, nzb_s_inner, & 3454 logc_u_s, logc_ratio_u_s, & 3455 nzt_topo_nestbc_s, 's', 's' ) 3430 3456 ENDIF 3431 3457 IF ( humidity .OR. 
passive_scalar ) THEN 3432 CALL pmci_extrap_ifoutflow_sn( q, nzb_s_inner, 's', 's' ) 3458 CALL pmci_interp_tril_sn( q, qc, ico, jco, kco, r1xo, r2xo, & 3459 r1yo,r2yo, r1zo, r2zo, nzb_s_inner, & 3460 logc_u_s, logc_ratio_u_s, & 3461 nzt_topo_nestbc_s, 's', 's' ) 3433 3462 ENDIF 3463 3464 IF ( TRIM( nesting_mode ) == 'one-way' ) THEN 3465 CALL pmci_extrap_ifoutflow_sn( u, nzb_u_inner, 's', 'u' ) 3466 CALL pmci_extrap_ifoutflow_sn( v, nzb_v_inner, 's', 'v' ) 3467 CALL pmci_extrap_ifoutflow_sn( w, nzb_w_inner, 's', 'w' ) 3468 CALL pmci_extrap_ifoutflow_sn( e, nzb_s_inner, 's', 'e' ) 3469 IF ( .NOT. neutral ) THEN 3470 CALL pmci_extrap_ifoutflow_sn( pt,nzb_s_inner, 's', 's' ) 3471 ENDIF 3472 IF ( humidity .OR. passive_scalar ) THEN 3473 CALL pmci_extrap_ifoutflow_sn( q, nzb_s_inner, 's', 's' ) 3474 ENDIF 3475 ENDIF 3476 3434 3477 ENDIF 3435 3478 3436 ENDIF 3437 ! 3438 !-- North border pe 3439 IF ( nest_bound_n ) THEN 3440 CALL pmci_interp_tril_sn( u, uc, icu, jco, kco, r1xu, r2xu, r1yo, & 3441 r2yo, r1zo, r2zo, nzb_u_inner, logc_u_n, & 3442 logc_ratio_u_n, nzt_topo_nestbc_n, 'n', & 3443 'u' ) 3444 CALL pmci_interp_tril_sn( v, vc, ico, jcv, kco, r1xo, r2xo, r1yv, & 3445 r2yv, r1zo, r2zo, nzb_v_inner, logc_v_n, & 3446 logc_ratio_v_n, nzt_topo_nestbc_n, 'n', & 3447 'v' ) 3448 CALL pmci_interp_tril_sn( w, wc, ico, jco, kcw, r1xo, r2xo, r1yo, & 3449 r2yo, r1zw, r2zw, nzb_w_inner, logc_w_n, & 3450 logc_ratio_w_n, nzt_topo_nestbc_n, 'n', & 3451 'w' ) 3452 CALL pmci_interp_tril_sn( e, ec, ico, jco, kco, r1xo, r2xo, r1yo, & 3453 r2yo, r1zo, r2zo, nzb_s_inner, logc_u_n, & 3454 logc_ratio_u_n, nzt_topo_nestbc_n, 'n', & 3455 'e' ) 3456 IF ( .NOT. neutral ) THEN 3457 CALL pmci_interp_tril_sn( pt, ptc, ico, jco, kco, r1xo, r2xo, & 3458 r1yo, r2yo, r1zo, r2zo, nzb_s_inner, & 3459 logc_u_n, logc_ratio_u_n, & 3460 nzt_topo_nestbc_n, 'n', 's' ) 3461 ENDIF 3462 IF ( humidity .OR. passive_scalar ) THEN 3463 CALL pmci_interp_tril_sn( q, qc, ico, jco, kco, r1xo, r2xo, r1yo, & 3464 r2yo, r1zo, r2zo, nzb_s_inner, & 3465 logc_u_n, logc_ratio_u_n, & 3466 nzt_topo_nestbc_n, 'n', 's' ) 3467 ENDIF 3468 3469 IF ( nesting_mode == 'one-way' ) THEN 3470 CALL pmci_extrap_ifoutflow_sn( u, nzb_u_inner, 'n', 'u' ) 3471 CALL pmci_extrap_ifoutflow_sn( v, nzb_v_inner, 'n', 'v' ) 3472 CALL pmci_extrap_ifoutflow_sn( w, nzb_w_inner, 'n', 'w' ) 3473 CALL pmci_extrap_ifoutflow_sn( e, nzb_s_inner, 'n', 'e' ) 3479 ! 3480 !-- North border pe 3481 IF ( nest_bound_n ) THEN 3482 CALL pmci_interp_tril_sn( u, uc, icu, jco, kco, r1xu, r2xu, & 3483 r1yo, r2yo, r1zo, r2zo, nzb_u_inner, & 3484 logc_u_n, logc_ratio_u_n, & 3485 nzt_topo_nestbc_n, 'n', 'u' ) 3486 CALL pmci_interp_tril_sn( v, vc, ico, jcv, kco, r1xo, r2xo, & 3487 r1yv, r2yv, r1zo, r2zo, nzb_v_inner, & 3488 logc_v_n, logc_ratio_v_n, & 3489 nzt_topo_nestbc_n, 'n', 'v' ) 3490 CALL pmci_interp_tril_sn( w, wc, ico, jco, kcw, r1xo, r2xo, & 3491 r1yo, r2yo, r1zw, r2zw, nzb_w_inner, & 3492 logc_w_n, logc_ratio_w_n, & 3493 nzt_topo_nestbc_n, 'n', 'w' ) 3494 CALL pmci_interp_tril_sn( e, ec, ico, jco, kco, r1xo, r2xo, & 3495 r1yo, r2yo, r1zo, r2zo, nzb_s_inner, & 3496 logc_u_n, logc_ratio_u_n, & 3497 nzt_topo_nestbc_n, 'n', 'e' ) 3474 3498 IF ( .NOT. neutral ) THEN 3475 CALL pmci_extrap_ifoutflow_sn( pt,nzb_s_inner, 'n', 's' ) 3499 CALL pmci_interp_tril_sn( pt, ptc, ico, jco, kco, r1xo, r2xo, & 3500 r1yo, r2yo, r1zo, r2zo, nzb_s_inner, & 3501 logc_u_n, logc_ratio_u_n, & 3502 nzt_topo_nestbc_n, 'n', 's' ) 3476 3503 ENDIF 3477 3504 IF ( humidity .OR. 
passive_scalar ) THEN 3478 CALL pmci_extrap_ifoutflow_sn( q, nzb_s_inner, 'n', 's' ) 3505 CALL pmci_interp_tril_sn( q, qc, ico, jco, kco, r1xo, r2xo, & 3506 r1yo, r2yo, r1zo, r2zo, nzb_s_inner, & 3507 logc_u_n, logc_ratio_u_n, & 3508 nzt_topo_nestbc_n, 'n', 's' ) 3479 3509 ENDIF 3480 3510 3511 IF ( TRIM( nesting_mode ) == 'one-way' ) THEN 3512 CALL pmci_extrap_ifoutflow_sn( u, nzb_u_inner, 'n', 'u' ) 3513 CALL pmci_extrap_ifoutflow_sn( v, nzb_v_inner, 'n', 'v' ) 3514 CALL pmci_extrap_ifoutflow_sn( w, nzb_w_inner, 'n', 'w' ) 3515 CALL pmci_extrap_ifoutflow_sn( e, nzb_s_inner, 'n', 'e' ) 3516 IF ( .NOT. neutral ) THEN 3517 CALL pmci_extrap_ifoutflow_sn( pt,nzb_s_inner, 'n', 's' ) 3518 ENDIF 3519 IF ( humidity .OR. passive_scalar ) THEN 3520 CALL pmci_extrap_ifoutflow_sn( q, nzb_s_inner, 'n', 's' ) 3521 ENDIF 3522 3523 ENDIF 3524 3481 3525 ENDIF 3482 3526 3483 ENDIF 3527 ENDIF !: IF ( nesting_mode /= 'vertical' ) 3484 3528 3485 3529 ! 3486 3530 !-- All PEs are top-border PEs 3487 CALL pmci_interp_tril_t( u, uc, icu, jco, kco, r1xu, r2xu, r1yo, &3531 CALL pmci_interp_tril_t( u, uc, icu, jco, kco, r1xu, r2xu, r1yo, & 3488 3532 r2yo, r1zo, r2zo, 'u' ) 3489 CALL pmci_interp_tril_t( v, vc, ico, jcv, kco, r1xo, r2xo, r1yv, &3533 CALL pmci_interp_tril_t( v, vc, ico, jcv, kco, r1xo, r2xo, r1yv, & 3490 3534 r2yv, r1zo, r2zo, 'v' ) 3491 CALL pmci_interp_tril_t( w, wc, ico, jco, kcw, r1xo, r2xo, r1yo, &3535 CALL pmci_interp_tril_t( w, wc, ico, jco, kcw, r1xo, r2xo, r1yo, & 3492 3536 r2yo, r1zw, r2zw, 'w' ) 3493 CALL pmci_interp_tril_t( e, ec, ico, jco, kco, r1xo, r2xo, r1yo, &3537 CALL pmci_interp_tril_t( e, ec, ico, jco, kco, r1xo, r2xo, r1yo, & 3494 3538 r2yo, r1zo, r2zo, 'e' ) 3495 3539 IF ( .NOT. neutral ) THEN 3496 CALL pmci_interp_tril_t( pt, ptc, ico, jco, kco, r1xo, r2xo, r1yo, &3540 CALL pmci_interp_tril_t( pt, ptc, ico, jco, kco, r1xo, r2xo, r1yo, & 3497 3541 r2yo, r1zo, r2zo, 's' ) 3498 3542 ENDIF 3499 3543 IF ( humidity .OR. passive_scalar ) THEN 3500 CALL pmci_interp_tril_t( q, qc, ico, jco, kco, r1xo, r2xo, r1yo, &3544 CALL pmci_interp_tril_t( q, qc, ico, jco, kco, r1xo, r2xo, r1yo, & 3501 3545 r2yo, r1zo, r2zo, 's' ) 3502 3546 ENDIF 3503 3547 3504 IF ( nesting_mode== 'one-way' ) THEN3548 IF ( TRIM( nesting_mode ) == 'one-way' ) THEN 3505 3549 CALL pmci_extrap_ifoutflow_t( u, 'u' ) 3506 3550 CALL pmci_extrap_ifoutflow_t( v, 'v' ) … … 3513 3557 CALL pmci_extrap_ifoutflow_t( q, 's' ) 3514 3558 ENDIF 3515 ENDIF3559 ENDIF 3516 3560 3517 3561 END SUBROUTINE pmci_interpolation … … 3526 3570 IMPLICIT NONE 3527 3571 3528 CALL pmci_anterp_tophat( u, uc, kctu, iflu, ifuu, jflo, jfuo, kflo, &3572 CALL pmci_anterp_tophat( u, uc, kctu, iflu, ifuu, jflo, jfuo, kflo, & 3529 3573 kfuo, ijfc_u, 'u' ) 3530 CALL pmci_anterp_tophat( v, vc, kctu, iflo, ifuo, jflv, jfuv, kflo, &3574 CALL pmci_anterp_tophat( v, vc, kctu, iflo, ifuo, jflv, jfuv, kflo, & 3531 3575 kfuo, ijfc_v, 'v' ) 3532 CALL pmci_anterp_tophat( w, wc, kctw, iflo, ifuo, jflo, jfuo, kflw, &3576 CALL pmci_anterp_tophat( w, wc, kctw, iflo, ifuo, jflo, jfuo, kflw, & 3533 3577 kfuw, ijfc_s, 'w' ) 3534 3578 IF ( .NOT. neutral ) THEN 3535 CALL pmci_anterp_tophat( pt, ptc, kctu, iflo, ifuo, jflo, jfuo, kflo, &3579 CALL pmci_anterp_tophat( pt, ptc, kctu, iflo, ifuo, jflo, jfuo, kflo, & 3536 3580 kfuo, ijfc_s, 's' ) 3537 3581 ENDIF 3538 3582 IF ( humidity .OR. 
passive_scalar ) THEN 3539 CALL pmci_anterp_tophat( q, qc, kctu, iflo, ifuo, jflo, jfuo, kflo, &3583 CALL pmci_anterp_tophat( q, qc, kctu, iflo, ifuo, jflo, jfuo, kflo, & 3540 3584 kfuo, ijfc_s, 's' ) 3541 3585 ENDIF … … 3545 3589 3546 3590 3547 SUBROUTINE pmci_interp_tril_lr( f, fc, ic, jc, kc, r1x, r2x, r1y, r2y, r1z, &3548 r2z, kb, logc, logc_ratio, nzt_topo_nestbc, &3591 SUBROUTINE pmci_interp_tril_lr( f, fc, ic, jc, kc, r1x, r2x, r1y, r2y, r1z, & 3592 r2z, kb, logc, logc_ratio, nzt_topo_nestbc, & 3549 3593 edge, var ) 3550 3594 ! 3551 !-- Interpolation of ghost-node values used as the c lient-domain boundary3595 !-- Interpolation of ghost-node values used as the child-domain boundary 3552 3596 !-- conditions. This subroutine handles the left and right boundaries. It is 3553 3597 !-- based on trilinear interpolation. … … 3555 3599 IMPLICIT NONE 3556 3600 3557 REAL(wp), DIMENSION(nzb:nzt+1,nysg:nyng,nxlg:nxrg), &3601 REAL(wp), DIMENSION(nzb:nzt+1,nysg:nyng,nxlg:nxrg), & 3558 3602 INTENT(INOUT) :: f !: 3559 REAL(wp), DIMENSION(0:cg%nz+1,jcs:jcn,icl:icr), &3603 REAL(wp), DIMENSION(0:cg%nz+1,jcs:jcn,icl:icr), & 3560 3604 INTENT(IN) :: fc !: 3561 REAL(wp), DIMENSION(1:2,0:ncorr-1,nzb:nzt_topo_nestbc,nys:nyn), &3605 REAL(wp), DIMENSION(1:2,0:ncorr-1,nzb:nzt_topo_nestbc,nys:nyn), & 3562 3606 INTENT(IN) :: logc_ratio !: 3563 3607 REAL(wp), DIMENSION(nxlg:nxrg), INTENT(IN) :: r1x !: … … 3572 3616 INTEGER(iwp), DIMENSION(nysg:nyng,nxlg:nxrg), INTENT(IN) :: kb !: 3573 3617 INTEGER(iwp), DIMENSION(nzb:nzt+1), INTENT(IN) :: kc !: 3574 INTEGER(iwp), DIMENSION(1:2,nzb:nzt_topo_nestbc,nys:nyn), &3618 INTEGER(iwp), DIMENSION(1:2,nzb:nzt_topo_nestbc,nys:nyn), & 3575 3619 INTENT(IN) :: logc !: 3576 3620 INTEGER(iwp) :: nzt_topo_nestbc !: 3577 3621 3578 CHARACTER(LEN=1), INTENT(IN) :: edge !:3579 CHARACTER(LEN=1), INTENT(IN) :: var !:3622 CHARACTER(LEN=1), INTENT(IN) :: edge !: 3623 CHARACTER(LEN=1), INTENT(IN) :: var !: 3580 3624 3581 3625 INTEGER(iwp) :: i !: … … 3701 3745 DO kcorr = 0, ncorr-1 3702 3746 kco = k + kcorr 3703 f(kco,jco,i) = 0.5_wp * ( logc_ratio(1,kcorr,k,j) * &3704 f(k1,j,i) &3705 + logc_ratio(2,jcorr,k,j) * &3747 f(kco,jco,i) = 0.5_wp * ( logc_ratio(1,kcorr,k,j) * & 3748 f(k1,j,i) & 3749 + logc_ratio(2,jcorr,k,j) * & 3706 3750 f(k,j1,i) ) 3707 3751 ENDDO … … 3747 3791 3748 3792 3749 SUBROUTINE pmci_interp_tril_sn( f, fc, ic, jc, kc, r1x, r2x, r1y, r2y, r1z, &3750 r2z, kb, logc, logc_ratio, &3793 SUBROUTINE pmci_interp_tril_sn( f, fc, ic, jc, kc, r1x, r2x, r1y, r2y, r1z, & 3794 r2z, kb, logc, logc_ratio, & 3751 3795 nzt_topo_nestbc, edge, var ) 3752 3796 3753 3797 ! 3754 !-- Interpolation of ghost-node values used as the c lient-domain boundary3798 !-- Interpolation of ghost-node values used as the child-domain boundary 3755 3799 !-- conditions. This subroutine handles the south and north boundaries. 3756 3800 !-- This subroutine is based on trilinear interpolation. 
… … 3758 3802 IMPLICIT NONE 3759 3803 3760 REAL(wp), DIMENSION(nzb:nzt+1,nysg:nyng,nxlg:nxrg), &3804 REAL(wp), DIMENSION(nzb:nzt+1,nysg:nyng,nxlg:nxrg), & 3761 3805 INTENT(INOUT) :: f !: 3762 REAL(wp), DIMENSION(0:cg%nz+1,jcs:jcn,icl:icr), &3806 REAL(wp), DIMENSION(0:cg%nz+1,jcs:jcn,icl:icr), & 3763 3807 INTENT(IN) :: fc !: 3764 REAL(wp), DIMENSION(1:2,0:ncorr-1,nzb:nzt_topo_nestbc,nxl:nxr), &3808 REAL(wp), DIMENSION(1:2,0:ncorr-1,nzb:nzt_topo_nestbc,nxl:nxr), & 3765 3809 INTENT(IN) :: logc_ratio !: 3766 3810 REAL(wp), DIMENSION(nxlg:nxrg), INTENT(IN) :: r1x !: … … 3775 3819 INTEGER(iwp), DIMENSION(nysg:nyng,nxlg:nxrg), INTENT(IN) :: kb !: 3776 3820 INTEGER(iwp), DIMENSION(nzb:nzt+1), INTENT(IN) :: kc !: 3777 INTEGER(iwp), DIMENSION(1:2,nzb:nzt_topo_nestbc,nxl:nxr), &3821 INTEGER(iwp), DIMENSION(1:2,nzb:nzt_topo_nestbc,nxl:nxr), & 3778 3822 INTENT(IN) :: logc !: 3779 3823 INTEGER(iwp) :: nzt_topo_nestbc !: … … 3902 3946 DO kcorr = 0, ncorr-1 3903 3947 kco = k + kcorr 3904 f(kco,i,ico) = 0.5_wp * ( logc_ratio(1,kcorr,k,i) * &3948 f(kco,i,ico) = 0.5_wp * ( logc_ratio(1,kcorr,k,i) * & 3905 3949 f(k1,j,i) & 3906 + logc_ratio(2,icorr,k,i) * &3950 + logc_ratio(2,icorr,k,i) * & 3907 3951 f(k,j,i1) ) 3908 3952 ENDDO … … 3948 3992 3949 3993 3950 SUBROUTINE pmci_interp_tril_t( f, fc, ic, jc, kc, r1x, r2x, r1y, r2y, r1z, &3994 SUBROUTINE pmci_interp_tril_t( f, fc, ic, jc, kc, r1x, r2x, r1y, r2y, r1z, & 3951 3995 r2z, var ) 3952 3996 3953 3997 ! 3954 !-- Interpolation of ghost-node values used as the c lient-domain boundary3998 !-- Interpolation of ghost-node values used as the child-domain boundary 3955 3999 !-- conditions. This subroutine handles the top boundary. 3956 4000 !-- This subroutine is based on trilinear interpolation. … … 3958 4002 IMPLICIT NONE 3959 4003 3960 REAL(wp), DIMENSION(nzb:nzt+1,nysg:nyng,nxlg:nxrg), &4004 REAL(wp), DIMENSION(nzb:nzt+1,nysg:nyng,nxlg:nxrg), & 3961 4005 INTENT(INOUT) :: f !: 3962 REAL(wp), DIMENSION(0:cg%nz+1,jcs:jcn,icl:icr), &4006 REAL(wp), DIMENSION(0:cg%nz+1,jcs:jcn,icl:icr), & 3963 4007 INTENT(IN) :: fc !: 3964 4008 REAL(wp), DIMENSION(nxlg:nxrg), INTENT(IN) :: r1x !: … … 3969 4013 REAL(wp), DIMENSION(nzb:nzt+1), INTENT(IN) :: r2z !: 3970 4014 3971 INTEGER(iwp), DIMENSION(nxlg:nxrg), INTENT(IN) :: ic !:3972 INTEGER(iwp), DIMENSION(nysg:nyng), INTENT(IN) :: jc !:3973 INTEGER(iwp), DIMENSION(nzb:nzt+1), INTENT(IN) :: kc !:4015 INTEGER(iwp), DIMENSION(nxlg:nxrg), INTENT(IN) :: ic !: 4016 INTEGER(iwp), DIMENSION(nysg:nyng), INTENT(IN) :: jc !: 4017 INTEGER(iwp), DIMENSION(nzb:nzt+1), INTENT(IN) :: kc !: 3974 4018 3975 4019 CHARACTER(LEN=1), INTENT(IN) :: var !: … … 4038 4082 SUBROUTINE pmci_extrap_ifoutflow_lr( f, kb, edge, var ) 4039 4083 ! 4040 !-- After the interpolation of ghost-node values for the c lient-domain4084 !-- After the interpolation of ghost-node values for the child-domain 4041 4085 !-- boundary conditions, this subroutine checks if there is a local outflow 4042 4086 !-- through the boundary. In that case this subroutine overwrites the … … 4047 4091 IMPLICIT NONE 4048 4092 4049 CHARACTER(LEN=1), INTENT(IN) :: edge !:4050 CHARACTER(LEN=1), INTENT(IN) :: var !:4093 CHARACTER(LEN=1), INTENT(IN) :: edge !: 4094 CHARACTER(LEN=1), INTENT(IN) :: var !: 4051 4095 4052 4096 INTEGER(iwp) :: i !: … … 4116 4160 SUBROUTINE pmci_extrap_ifoutflow_sn( f, kb, edge, var ) 4117 4161 ! 
4118 !-- After the interpolation of ghost-node values for the c lient-domain4162 !-- After the interpolation of ghost-node values for the child-domain 4119 4163 !-- boundary conditions, this subroutine checks if there is a local outflow 4120 4164 !-- through the boundary. In that case this subroutine overwrites the … … 4193 4237 SUBROUTINE pmci_extrap_ifoutflow_t( f, var ) 4194 4238 ! 4195 !-- Interpolation of ghost-node values used as the c lient-domain boundary4239 !-- Interpolation of ghost-node values used as the child-domain boundary 4196 4240 !-- conditions. This subroutine handles the top boundary. It is based on 4197 4241 !-- trilinear interpolation. … … 4208 4252 REAL(wp) :: vdotnor !: 4209 4253 4210 REAL(wp), DIMENSION(nzb:nzt+1,nys-nbgp:nyn+nbgp,nxl-nbgp:nxr+nbgp), &4254 REAL(wp), DIMENSION(nzb:nzt+1,nys-nbgp:nyn+nbgp,nxl-nbgp:nxr+nbgp), & 4211 4255 INTENT(INOUT) :: f !: 4212 4256 … … 4241 4285 4242 4286 4243 SUBROUTINE pmci_anterp_tophat( f, fc, kct, ifl, ifu, jfl, jfu, kfl, kfu, &4287 SUBROUTINE pmci_anterp_tophat( f, fc, kct, ifl, ifu, jfl, jfu, kfl, kfu, & 4244 4288 ijfc, var ) 4245 4289 ! 4246 !-- Anterpolation of internal-node values to be used as the server-domain4290 !-- Anterpolation of internal-node values to be used as the parent-domain 4247 4291 !-- values. This subroutine is based on the first-order numerical 4248 4292 !-- integration of the fine-grid values contained within the coarse-grid … … 4296 4340 !-- Note that kcb is simply zero and kct enters here as a parameter and it is 4297 4341 !-- determined in pmci_init_anterp_tophat 4298 IF ( nest_bound_l ) THEN 4299 IF ( var == 'u' ) THEN 4300 iclp = icl + nhll + 1 4301 ELSE 4342 4343 IF ( nesting_mode == 'vertical' ) THEN 4344 IF ( nest_bound_l ) THEN 4302 4345 iclp = icl + nhll 4303 4346 ENDIF 4304 ENDIF 4305 IF ( nest_bound_r ) THEN 4306 icrm = icr - nhlr 4307 ENDIF 4308 4309 IF ( nest_bound_s ) THEN 4310 IF ( var == 'v' ) THEN 4311 jcsp = jcs + nhls + 1 4312 ELSE 4347 IF ( nest_bound_r ) THEN 4348 icrm = icr - nhlr 4349 ENDIF 4350 IF ( nest_bound_s ) THEN 4313 4351 jcsp = jcs + nhls 4314 4352 ENDIF 4315 ENDIF 4316 IF ( nest_bound_n ) THEN 4317 jcnm = jcn - nhln 4318 ENDIF 4319 kcb = 0 4320 4353 IF ( nest_bound_n ) THEN 4354 jcnm = jcn - nhln 4355 ENDIF 4356 ELSE 4357 IF ( nest_bound_l ) THEN 4358 IF ( var == 'u' ) THEN 4359 iclp = icl + nhll + 1 4360 ELSE 4361 iclp = icl + nhll 4362 ENDIF 4363 ENDIF 4364 IF ( nest_bound_r ) THEN 4365 icrm = icr - nhlr 4366 ENDIF 4367 4368 IF ( nest_bound_s ) THEN 4369 IF ( var == 'v' ) THEN 4370 jcsp = jcs + nhls + 1 4371 ELSE 4372 jcsp = jcs + nhls 4373 ENDIF 4374 ENDIF 4375 IF ( nest_bound_n ) THEN 4376 jcnm = jcn - nhln 4377 ENDIF 4378 kcb = 0 4379 ENDIF 4380 4321 4381 ! 4322 4382 !-- Note that ii, jj, and kk are coarse-grid indices and i,j, and k … … 4349 4409 ENDIF 4350 4410 4351 fc(kk,jj,ii) = ( 1.0_wp - fra ) * fc(kk,jj,ii) + &4411 fc(kk,jj,ii) = ( 1.0_wp - fra ) * fc(kk,jj,ii) + & 4352 4412 fra * cellsum / REAL( nfc, KIND = wp ) 4353 4413 … … 4359 4419 4360 4420 #endif 4361 END SUBROUTINE pmci_c lient_datatrans4421 END SUBROUTINE pmci_child_datatrans 4362 4422 4363 4423 END MODULE pmc_interface -
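The anterpolation applied above in pmci_anterp_tophat reduces, for every parent-grid cell, the child-grid values contained in that cell to their top-hat (box) average and then relaxes the parent value towards that average with the factor fra, as in the statement fc(kk,jj,ii) = ( 1.0_wp - fra ) * fc(kk,jj,ii) + fra * cellsum / REAL( nfc, KIND = wp ). The following minimal, self-contained sketch shows that restriction step in one dimension; the grid sizes, the refinement ratio and all names (nratio, ncoarse, fra, ...) are illustrative assumptions, not PALM code.

! Sketch only: 1-d top-hat anterpolation of a fine (child) grid onto a
! coarse (parent) grid with relaxation factor fra.
PROGRAM anterp_tophat_sketch

   IMPLICIT NONE

   INTEGER, PARAMETER :: wp = KIND( 1.0D0 )  !< working precision
   INTEGER, PARAMETER :: nratio  = 3         !< fine cells per coarse cell
   INTEGER, PARAMETER :: ncoarse = 4         !< number of coarse cells
   INTEGER, PARAMETER :: nfine   = nratio * ncoarse

   REAL(wp) :: f(nfine)     !< fine-grid (child) values
   REAL(wp) :: fc(ncoarse)  !< coarse-grid (parent) values
   REAL(wp) :: cellsum      !< sum of fine values inside one coarse cell
   REAL(wp) :: fra          !< anterpolation relaxation factor
   INTEGER  :: i, ii, ifl, ifu

   f   = 1.0_wp             ! stand-in for the child-domain result
   fc  = 0.0_wp             ! stand-in for the parent-domain field
   fra = 0.5_wp

   DO ii = 1, ncoarse
!
!--   Index range of the fine cells contained in coarse cell ii
      ifl = ( ii - 1 ) * nratio + 1
      ifu = ii * nratio
      cellsum = 0.0_wp
      DO i = ifl, ifu
         cellsum = cellsum + f(i)
      ENDDO
!
!--   First-order (top-hat) restriction, blended with the old parent value
      fc(ii) = ( 1.0_wp - fra ) * fc(ii) +                                     &
               fra * cellsum / REAL( ifu - ifl + 1, KIND = wp )
   ENDDO

   PRINT*, 'anterpolated coarse values: ', fc

END PROGRAM anterp_tophat_sketch

A value of fra = 1.0 would overwrite the parent solution completely; smaller values give the gradual relaxation used for two-way coupling.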
palm/trunk/SOURCE/pmc_mpi_wrapper_mod.f90
r1901 r1933 1 1 MODULE pmc_mpi_wrapper 2 2 3 !------------------------------------------------------------------------------- -!3 !-------------------------------------------------------------------------------! 4 4 ! This file is part of PALM. 5 5 ! … … 16 16 ! 17 17 ! Copyright 1997-2016 Leibniz Universitaet Hannover 18 !------------------------------------------------------------------------------- -!18 !-------------------------------------------------------------------------------! 19 19 ! 20 20 ! Current revisions: 21 21 ! ------------------ 22 22 ! 23 ! 23 ! 24 24 ! Former revisions: 25 25 ! ----------------- 26 26 ! $Id$ 27 27 ! 28 ! 1901 2016-05-04 15:39:38Z raasch 29 ! Code clean up. The words server/client changed to parent/child. 30 ! 28 31 ! 1900 2016-05-04 15:27:53Z raasch 29 32 ! re-formatted to match PALM style … … 50 53 ! 51 54 ! MPI Wrapper of Palm Model Coupler 52 !------------------------------------------------------------------------------ !55 !-------------------------------------------------------------------------------! 53 56 54 57 #if defined( __parallel ) … … 62 65 63 66 USE kinds 64 USE pmc_handle_communicator, &65 ONLY: m_model_comm, m_model_rank, m_to_ server_comm, m_to_client_comm67 USE pmc_handle_communicator, & 68 ONLY: m_model_comm, m_model_rank, m_to_parent_comm, m_to_child_comm 66 69 67 70 IMPLICIT NONE … … 70 73 SAVE 71 74 72 INTERFACE pmc_send_to_ server73 MODULE PROCEDURE pmc_send_to_ server_integer74 MODULE PROCEDURE pmc_send_to_ server_integer_275 MODULE PROCEDURE pmc_send_to_ server_real_r176 MODULE PROCEDURE pmc_send_to_ server_real_r277 MODULE PROCEDURE pmc_send_to_ server_real_r378 END INTERFACE pmc_send_to_ server79 80 INTERFACE pmc_recv_from_ server81 MODULE PROCEDURE pmc_recv_from_ server_integer82 MODULE PROCEDURE pmc_recv_from_ server_real_r183 MODULE PROCEDURE pmc_recv_from_ server_real_r284 MODULE PROCEDURE pmc_recv_from_ server_real_r385 END INTERFACE pmc_recv_from_ server86 87 INTERFACE pmc_send_to_c lient88 MODULE PROCEDURE pmc_send_to_c lient_integer89 MODULE PROCEDURE pmc_send_to_c lient_real_r190 MODULE PROCEDURE pmc_send_to_c lient_real_r291 MODULE PROCEDURE pmc_send_to_c lient_real_r392 END INTERFACE pmc_send_to_c lient93 94 INTERFACE pmc_recv_from_c lient95 MODULE PROCEDURE pmc_recv_from_c lient_integer96 MODULE PROCEDURE pmc_recv_from_c lient_integer_297 MODULE PROCEDURE pmc_recv_from_c lient_real_r198 MODULE PROCEDURE pmc_recv_from_c lient_real_r299 MODULE PROCEDURE pmc_recv_from_c lient_real_r3100 END INTERFACE pmc_recv_from_c lient75 INTERFACE pmc_send_to_parent 76 MODULE PROCEDURE pmc_send_to_parent_integer 77 MODULE PROCEDURE pmc_send_to_parent_integer_2 78 MODULE PROCEDURE pmc_send_to_parent_real_r1 79 MODULE PROCEDURE pmc_send_to_parent_real_r2 80 MODULE PROCEDURE pmc_send_to_parent_real_r3 81 END INTERFACE pmc_send_to_parent 82 83 INTERFACE pmc_recv_from_parent 84 MODULE PROCEDURE pmc_recv_from_parent_integer 85 MODULE PROCEDURE pmc_recv_from_parent_real_r1 86 MODULE PROCEDURE pmc_recv_from_parent_real_r2 87 MODULE PROCEDURE pmc_recv_from_parent_real_r3 88 END INTERFACE pmc_recv_from_parent 89 90 INTERFACE pmc_send_to_child 91 MODULE PROCEDURE pmc_send_to_child_integer 92 MODULE PROCEDURE pmc_send_to_child_real_r1 93 MODULE PROCEDURE pmc_send_to_child_real_r2 94 MODULE PROCEDURE pmc_send_to_child_real_r3 95 END INTERFACE pmc_send_to_child 96 97 INTERFACE pmc_recv_from_child 98 MODULE PROCEDURE pmc_recv_from_child_integer 99 MODULE PROCEDURE pmc_recv_from_child_integer_2 100 MODULE PROCEDURE pmc_recv_from_child_real_r1 
101 MODULE PROCEDURE pmc_recv_from_child_real_r2 102 MODULE PROCEDURE pmc_recv_from_child_real_r3 103 END INTERFACE pmc_recv_from_child 101 104 102 105 INTERFACE pmc_bcast … … 118 121 END INTERFACE pmc_time 119 122 120 PUBLIC pmc_alloc_mem, pmc_bcast, pmc_inter_bcast, pmc_recv_from_c lient,&121 pmc_recv_from_ server, pmc_send_to_client, pmc_send_to_server,&123 PUBLIC pmc_alloc_mem, pmc_bcast, pmc_inter_bcast, pmc_recv_from_child, & 124 pmc_recv_from_parent, pmc_send_to_child, pmc_send_to_parent, & 122 125 pmc_time 123 126 … … 125 128 126 129 127 SUBROUTINE pmc_send_to_ server_integer( buf, n, server_rank, tag, ierr )130 SUBROUTINE pmc_send_to_parent_integer( buf, n, parent_rank, tag, ierr ) 128 131 129 132 IMPLICIT NONE … … 131 134 INTEGER, DIMENSION(:), INTENT(IN) :: buf !< 132 135 INTEGER, INTENT(IN) :: n !< 133 INTEGER, INTENT(IN) :: server_rank !<136 INTEGER, INTENT(IN) :: parent_rank !< 134 137 INTEGER, INTENT(IN) :: tag !< 135 138 INTEGER, INTENT(OUT) :: ierr !< 136 139 137 140 ierr = 0 138 CALL MPI_SEND( buf, n, MPI_INTEGER, server_rank, tag, m_to_server_comm,&141 CALL MPI_SEND( buf, n, MPI_INTEGER, parent_rank, tag, m_to_parent_comm, & 139 142 ierr) 140 143 141 END SUBROUTINE pmc_send_to_ server_integer142 143 144 145 SUBROUTINE pmc_recv_from_ server_integer( buf, n, server_rank, tag, ierr )144 END SUBROUTINE pmc_send_to_parent_integer 145 146 147 148 SUBROUTINE pmc_recv_from_parent_integer( buf, n, parent_rank, tag, ierr ) 146 149 147 150 IMPLICIT NONE … … 149 152 INTEGER, DIMENSION(:), INTENT(OUT) :: buf !< 150 153 INTEGER, INTENT(IN) :: n !< 151 INTEGER, INTENT(IN) :: server_rank !<154 INTEGER, INTENT(IN) :: parent_rank !< 152 155 INTEGER, INTENT(IN) :: tag !< 153 156 INTEGER, INTENT(OUT) :: ierr !< 154 157 155 158 ierr = 0 156 CALL MPI_RECV( buf, n, MPI_INTEGER, server_rank, tag, m_to_server_comm,&159 CALL MPI_RECV( buf, n, MPI_INTEGER, parent_rank, tag, m_to_parent_comm, & 157 160 MPI_STATUS_IGNORE, ierr ) 158 161 159 END SUBROUTINE pmc_recv_from_ server_integer160 161 162 163 SUBROUTINE pmc_send_to_ server_integer_2( buf, n, server_rank, tag, ierr )162 END SUBROUTINE pmc_recv_from_parent_integer 163 164 165 166 SUBROUTINE pmc_send_to_parent_integer_2( buf, n, parent_rank, tag, ierr ) 164 167 165 168 IMPLICIT NONE … … 167 170 INTEGER, DIMENSION(:,:), INTENT(IN) :: buf !< 168 171 INTEGER, INTENT(IN) :: n !< 169 INTEGER, INTENT(IN) :: server_rank !<172 INTEGER, INTENT(IN) :: parent_rank !< 170 173 INTEGER, INTENT(IN) :: tag !< 171 174 INTEGER, INTENT(OUT) :: ierr !< 172 175 173 176 ierr = 0 174 CALL MPI_SEND( buf, n, MPI_INTEGER, server_rank, tag, m_to_server_comm,&177 CALL MPI_SEND( buf, n, MPI_INTEGER, parent_rank, tag, m_to_parent_comm, & 175 178 ierr ) 176 179 177 END SUBROUTINE pmc_send_to_ server_integer_2178 179 180 181 SUBROUTINE pmc_send_to_ server_real_r1( buf, n, server_rank, tag, ierr )180 END SUBROUTINE pmc_send_to_parent_integer_2 181 182 183 184 SUBROUTINE pmc_send_to_parent_real_r1( buf, n, parent_rank, tag, ierr ) 182 185 183 186 IMPLICIT NONE … … 185 188 REAL(wp), DIMENSION(:), INTENT(IN) :: buf !< 186 189 INTEGER, INTENT(IN) :: n !< 187 INTEGER, INTENT(IN) :: server_rank !<190 INTEGER, INTENT(IN) :: parent_rank !< 188 191 INTEGER, INTENT(IN) :: tag !< 189 192 INTEGER, INTENT(OUT) :: ierr !< 190 193 191 194 ierr = 0 192 CALL MPI_SEND( buf, n, MPI_REAL, server_rank, tag, m_to_server_comm, ierr )193 194 END SUBROUTINE pmc_send_to_ server_real_r1195 196 197 198 SUBROUTINE pmc_recv_from_ server_real_r1( buf, n, server_rank, tag, ierr )195 CALL MPI_SEND( buf, n, 
MPI_REAL, parent_rank, tag, m_to_parent_comm, ierr ) 196 197 END SUBROUTINE pmc_send_to_parent_real_r1 198 199 200 201 SUBROUTINE pmc_recv_from_parent_real_r1( buf, n, parent_rank, tag, ierr ) 199 202 200 203 IMPLICIT NONE … … 202 205 REAL(wp), DIMENSION(:), INTENT(OUT) :: buf !< 203 206 INTEGER, INTENT(IN) :: n !< 204 INTEGER, INTENT(IN) :: server_rank !<207 INTEGER, INTENT(IN) :: parent_rank !< 205 208 INTEGER, INTENT(IN) :: tag !< 206 209 INTEGER, INTENT(OUT) :: ierr !< 207 210 208 211 ierr = 0 209 CALL MPI_RECV( buf, n, MPI_REAL, server_rank, tag, m_to_server_comm,&212 CALL MPI_RECV( buf, n, MPI_REAL, parent_rank, tag, m_to_parent_comm, & 210 213 MPI_STATUS_IGNORE, ierr ) 211 214 212 END SUBROUTINE pmc_recv_from_ server_real_r1213 214 215 216 SUBROUTINE pmc_send_to_ server_real_r2( buf, n, server_rank, tag, ierr )215 END SUBROUTINE pmc_recv_from_parent_real_r1 216 217 218 219 SUBROUTINE pmc_send_to_parent_real_r2( buf, n, parent_rank, tag, ierr ) 217 220 218 221 IMPLICIT NONE … … 220 223 REAL(wp), DIMENSION(:,:), INTENT(IN) :: buf !< 221 224 INTEGER, INTENT(IN) :: n !< 222 INTEGER, INTENT(IN) :: server_rank !<225 INTEGER, INTENT(IN) :: parent_rank !< 223 226 INTEGER, INTENT(IN) :: tag !< 224 227 INTEGER, INTENT(OUT) :: ierr !< 225 228 226 229 ierr = 0 227 CALL MPI_SEND( buf, n, MPI_REAL, server_rank, tag, m_to_server_comm, ierr )228 229 END SUBROUTINE pmc_send_to_ server_real_r2230 231 232 SUBROUTINE pmc_recv_from_ server_real_r2( buf, n, server_rank, tag, ierr )230 CALL MPI_SEND( buf, n, MPI_REAL, parent_rank, tag, m_to_parent_comm, ierr ) 231 232 END SUBROUTINE pmc_send_to_parent_real_r2 233 234 235 SUBROUTINE pmc_recv_from_parent_real_r2( buf, n, parent_rank, tag, ierr ) 233 236 234 237 IMPLICIT NONE … … 236 239 REAL(wp), DIMENSION(:,:), INTENT(OUT) :: buf !< 237 240 INTEGER, INTENT(IN) :: n !< 238 INTEGER, INTENT(IN) :: server_rank !<241 INTEGER, INTENT(IN) :: parent_rank !< 239 242 INTEGER, INTENT(IN) :: tag !< 240 243 INTEGER, INTENT(OUT) :: ierr !< 241 244 242 245 ierr = 0 243 CALL MPI_RECV( buf, n, MPI_REAL, server_rank, tag, m_to_server_comm,&246 CALL MPI_RECV( buf, n, MPI_REAL, parent_rank, tag, m_to_parent_comm, & 244 247 MPI_STATUS_IGNORE, ierr ) 245 248 246 END SUBROUTINE pmc_recv_from_ server_real_r2247 248 249 250 SUBROUTINE pmc_send_to_ server_real_r3( buf, n, server_rank, tag, ierr )249 END SUBROUTINE pmc_recv_from_parent_real_r2 250 251 252 253 SUBROUTINE pmc_send_to_parent_real_r3( buf, n, parent_rank, tag, ierr ) 251 254 252 255 IMPLICIT NONE … … 254 257 REAL(wp), DIMENSION(:,:,:), INTENT(IN) :: buf !< 255 258 INTEGER, INTENT(IN) :: n !< 256 INTEGER, INTENT(IN) :: server_rank !<259 INTEGER, INTENT(IN) :: parent_rank !< 257 260 INTEGER, INTENT(IN) :: tag !< 258 261 INTEGER, INTENT(OUT) :: ierr !< 259 262 260 263 ierr = 0 261 CALL MPI_SEND( buf, n, MPI_REAL, server_rank, tag, m_to_server_comm, ierr )262 263 END SUBROUTINE pmc_send_to_ server_real_r3264 265 266 267 SUBROUTINE pmc_recv_from_ server_real_r3( buf, n, server_rank, tag, ierr )264 CALL MPI_SEND( buf, n, MPI_REAL, parent_rank, tag, m_to_parent_comm, ierr ) 265 266 END SUBROUTINE pmc_send_to_parent_real_r3 267 268 269 270 SUBROUTINE pmc_recv_from_parent_real_r3( buf, n, parent_rank, tag, ierr ) 268 271 269 272 IMPLICIT NONE … … 271 274 REAL(wp), DIMENSION(:,:,:), INTENT(OUT) :: buf !< 272 275 INTEGER, INTENT(IN) :: n !< 273 INTEGER, INTENT(IN) :: server_rank !<276 INTEGER, INTENT(IN) :: parent_rank !< 274 277 INTEGER, INTENT(IN) :: tag !< 275 278 INTEGER, INTENT(OUT) :: ierr !< 276 279 277 280 ierr = 0 278 
CALL MPI_RECV( buf, n, MPI_REAL, server_rank, tag, m_to_server_comm,&281 CALL MPI_RECV( buf, n, MPI_REAL, parent_rank, tag, m_to_parent_comm, & 279 282 MPI_STATUS_IGNORE, ierr ) 280 283 281 END SUBROUTINE pmc_recv_from_ server_real_r3282 283 284 285 SUBROUTINE pmc_send_to_c lient_integer( client_id, buf, n, client_rank, tag,&286 287 288 IMPLICIT NONE 289 290 INTEGER, INTENT(IN) :: c lient_id!<284 END SUBROUTINE pmc_recv_from_parent_real_r3 285 286 287 288 SUBROUTINE pmc_send_to_child_integer( child_id, buf, n, child_rank, tag, & 289 ierr ) 290 291 IMPLICIT NONE 292 293 INTEGER, INTENT(IN) :: child_id !< 291 294 INTEGER, DIMENSION(:), INTENT(IN) :: buf !< 292 295 INTEGER, INTENT(IN) :: n !< 293 INTEGER, INTENT(IN) :: c lient_rank!<296 INTEGER, INTENT(IN) :: child_rank !< 294 297 INTEGER, INTENT(IN) :: tag !< 295 298 INTEGER, INTENT(OUT) :: ierr !< 296 299 297 300 ierr = 0 298 CALL MPI_SEND( buf, n, MPI_INTEGER, c lient_rank, tag,&299 m_to_c lient_comm(client_id), ierr )300 301 END SUBROUTINE pmc_send_to_c lient_integer302 303 304 305 SUBROUTINE pmc_recv_from_c lient_integer( client_id, buf, n, client_rank, tag,&306 307 308 IMPLICIT NONE 309 310 INTEGER, INTENT(IN) :: c lient_id!<301 CALL MPI_SEND( buf, n, MPI_INTEGER, child_rank, tag, & 302 m_to_child_comm(child_id), ierr ) 303 304 END SUBROUTINE pmc_send_to_child_integer 305 306 307 308 SUBROUTINE pmc_recv_from_child_integer( child_id, buf, n, child_rank, tag, & 309 ierr ) 310 311 IMPLICIT NONE 312 313 INTEGER, INTENT(IN) :: child_id !< 311 314 INTEGER, DIMENSION(:), INTENT(INOUT) :: buf !< 312 315 INTEGER, INTENT(IN) :: n !< 313 INTEGER, INTENT(IN) :: c lient_rank!<316 INTEGER, INTENT(IN) :: child_rank !< 314 317 INTEGER, INTENT(IN) :: tag !< 315 318 INTEGER, INTENT(OUT) :: ierr !< 316 319 317 320 ierr = 0 318 CALL MPI_RECV( buf, n, MPI_INTEGER, c lient_rank, tag,&319 m_to_c lient_comm(client_id), MPI_STATUS_IGNORE, ierr )320 321 END SUBROUTINE pmc_recv_from_c lient_integer322 323 324 325 SUBROUTINE pmc_recv_from_c lient_integer_2( client_id, buf, n, client_rank,&326 327 328 IMPLICIT NONE 329 330 INTEGER, INTENT(IN) :: c lient_id!<321 CALL MPI_RECV( buf, n, MPI_INTEGER, child_rank, tag, & 322 m_to_child_comm(child_id), MPI_STATUS_IGNORE, ierr ) 323 324 END SUBROUTINE pmc_recv_from_child_integer 325 326 327 328 SUBROUTINE pmc_recv_from_child_integer_2( child_id, buf, n, child_rank, & 329 tag, ierr ) 330 331 IMPLICIT NONE 332 333 INTEGER, INTENT(IN) :: child_id !< 331 334 INTEGER, DIMENSION(:,:), INTENT(OUT) :: buf !< 332 335 INTEGER, INTENT(IN) :: n !< 333 INTEGER, INTENT(IN) :: c lient_rank!<336 INTEGER, INTENT(IN) :: child_rank !< 334 337 INTEGER, INTENT(IN) :: tag !< 335 338 INTEGER, INTENT(OUT) :: ierr !< 336 339 337 340 ierr = 0 338 CALL MPI_RECV( buf, n, MPI_INTEGER, c lient_rank, tag,&339 m_to_c lient_comm(client_id), MPI_STATUS_IGNORE, ierr )340 341 END SUBROUTINE pmc_recv_from_c lient_integer_2342 343 344 345 SUBROUTINE pmc_send_to_c lient_real_r1( client_id, buf, n, client_rank, tag,&346 347 348 IMPLICIT NONE 349 350 INTEGER, INTENT(IN) :: c lient_id!<341 CALL MPI_RECV( buf, n, MPI_INTEGER, child_rank, tag, & 342 m_to_child_comm(child_id), MPI_STATUS_IGNORE, ierr ) 343 344 END SUBROUTINE pmc_recv_from_child_integer_2 345 346 347 348 SUBROUTINE pmc_send_to_child_real_r1( child_id, buf, n, child_rank, tag, & 349 ierr ) 350 351 IMPLICIT NONE 352 353 INTEGER, INTENT(IN) :: child_id !< 351 354 REAL(wp), DIMENSION(:), INTENT(IN) :: buf !< 352 355 INTEGER, INTENT(IN) :: n !< 353 INTEGER, INTENT(IN) :: c lient_rank!<356 INTEGER, 
INTENT(IN) :: child_rank !< 354 357 INTEGER, INTENT(IN) :: tag !< 355 358 INTEGER, INTENT(OUT) :: ierr !< 356 359 357 360 ierr = 0 358 CALL MPI_SEND( buf, n, MPI_REAL, c lient_rank, tag,&359 m_to_c lient_comm(client_id), ierr )360 361 END SUBROUTINE pmc_send_to_c lient_real_r1362 363 364 365 SUBROUTINE pmc_recv_from_c lient_real_r1( client_id, buf, n, client_rank, tag,&366 367 368 IMPLICIT NONE 369 370 INTEGER, INTENT(IN) :: c lient_id!<361 CALL MPI_SEND( buf, n, MPI_REAL, child_rank, tag, & 362 m_to_child_comm(child_id), ierr ) 363 364 END SUBROUTINE pmc_send_to_child_real_r1 365 366 367 368 SUBROUTINE pmc_recv_from_child_real_r1( child_id, buf, n, child_rank, tag, & 369 ierr ) 370 371 IMPLICIT NONE 372 373 INTEGER, INTENT(IN) :: child_id !< 371 374 REAL(wp), DIMENSION(:), INTENT(INOUT) :: buf !< 372 375 INTEGER, INTENT(IN) :: n !< 373 INTEGER, INTENT(IN) :: c lient_rank!<376 INTEGER, INTENT(IN) :: child_rank !< 374 377 INTEGER, INTENT(IN) :: tag !< 375 378 INTEGER, INTENT(OUT) :: ierr !< 376 379 377 380 ierr = 0 378 CALL MPI_RECV( buf, n, MPI_REAL, c lient_rank, tag,&379 m_to_c lient_comm(client_id), MPI_STATUS_IGNORE, ierr )380 381 END SUBROUTINE pmc_recv_from_c lient_real_r1382 383 384 385 SUBROUTINE pmc_send_to_c lient_real_r2( client_id, buf, n, client_rank, tag,&386 387 388 IMPLICIT NONE 389 390 INTEGER, INTENT(IN) :: c lient_id!<381 CALL MPI_RECV( buf, n, MPI_REAL, child_rank, tag, & 382 m_to_child_comm(child_id), MPI_STATUS_IGNORE, ierr ) 383 384 END SUBROUTINE pmc_recv_from_child_real_r1 385 386 387 388 SUBROUTINE pmc_send_to_child_real_r2( child_id, buf, n, child_rank, tag, & 389 ierr ) 390 391 IMPLICIT NONE 392 393 INTEGER, INTENT(IN) :: child_id !< 391 394 REAL(wp), DIMENSION(:,:), INTENT(IN) :: buf !< 392 395 INTEGER, INTENT(IN) :: n !< 393 INTEGER, INTENT(IN) :: c lient_rank!<396 INTEGER, INTENT(IN) :: child_rank !< 394 397 INTEGER, INTENT(IN) :: tag !< 395 398 INTEGER, INTENT(OUT) :: ierr !< 396 399 397 400 ierr = 0 398 CALL MPI_SEND( buf, n, MPI_REAL, c lient_rank, tag,&399 m_to_c lient_comm(client_id), ierr )400 401 END SUBROUTINE pmc_send_to_c lient_real_r2402 403 404 405 SUBROUTINE pmc_recv_from_c lient_real_r2( client_id, buf, n, client_rank, tag,&406 407 408 IMPLICIT NONE 409 410 INTEGER, INTENT(IN) :: c lient_id!<401 CALL MPI_SEND( buf, n, MPI_REAL, child_rank, tag, & 402 m_to_child_comm(child_id), ierr ) 403 404 END SUBROUTINE pmc_send_to_child_real_r2 405 406 407 408 SUBROUTINE pmc_recv_from_child_real_r2( child_id, buf, n, child_rank, tag, & 409 ierr ) 410 411 IMPLICIT NONE 412 413 INTEGER, INTENT(IN) :: child_id !< 411 414 REAL(wp), DIMENSION(:,:), INTENT(OUT) :: buf !< 412 415 INTEGER, INTENT(IN) :: n !< 413 INTEGER, INTENT(IN) :: c lient_rank!<416 INTEGER, INTENT(IN) :: child_rank !< 414 417 INTEGER, INTENT(IN) :: tag !< 415 418 INTEGER, INTENT(OUT) :: ierr !< 416 419 417 420 ierr = 0 418 CALL MPI_RECV( buf, n, MPI_REAL, c lient_rank, tag,&419 m_to_c lient_comm(client_id), MPI_STATUS_IGNORE, ierr )420 421 END SUBROUTINE pmc_recv_from_c lient_real_r2422 423 424 425 SUBROUTINE pmc_send_to_c lient_real_r3( client_id, buf, n, client_rank, tag,&426 427 428 IMPLICIT NONE 429 430 INTEGER, INTENT(IN) :: c lient_id!<421 CALL MPI_RECV( buf, n, MPI_REAL, child_rank, tag, & 422 m_to_child_comm(child_id), MPI_STATUS_IGNORE, ierr ) 423 424 END SUBROUTINE pmc_recv_from_child_real_r2 425 426 427 428 SUBROUTINE pmc_send_to_child_real_r3( child_id, buf, n, child_rank, tag, & 429 ierr) 430 431 IMPLICIT NONE 432 433 INTEGER, INTENT(IN) :: child_id !< 431 434 REAL(wp), 
DIMENSION(:,:,:), INTENT(IN) :: buf !< 432 435 INTEGER, INTENT(IN) :: n !< 433 INTEGER, INTENT(IN) :: c lient_rank!<436 INTEGER, INTENT(IN) :: child_rank !< 434 437 INTEGER, INTENT(IN) :: tag !< 435 438 INTEGER, INTENT(OUT) :: ierr !< 436 439 437 440 ierr = 0 438 CALL MPI_SEND( buf, n, MPI_REAL, c lient_rank, tag,&439 m_to_c lient_comm(client_id), ierr )440 441 END SUBROUTINE pmc_send_to_c lient_real_r3442 443 444 445 SUBROUTINE pmc_recv_from_c lient_real_r3( client_id, buf, n, client_rank, tag,&446 447 448 IMPLICIT NONE 449 450 INTEGER, INTENT(IN) :: c lient_id!<441 CALL MPI_SEND( buf, n, MPI_REAL, child_rank, tag, & 442 m_to_child_comm(child_id), ierr ) 443 444 END SUBROUTINE pmc_send_to_child_real_r3 445 446 447 448 SUBROUTINE pmc_recv_from_child_real_r3( child_id, buf, n, child_rank, tag, & 449 ierr ) 450 451 IMPLICIT NONE 452 453 INTEGER, INTENT(IN) :: child_id !< 451 454 REAL(wp), DIMENSION(:,:,:), INTENT(OUT) :: buf !< 452 455 INTEGER, INTENT(IN) :: n !< 453 INTEGER, INTENT(IN) :: c lient_rank!<456 INTEGER, INTENT(IN) :: child_rank !< 454 457 INTEGER, INTENT(IN) :: tag !< 455 458 INTEGER, INTENT(OUT) :: ierr !< 456 459 457 460 ierr = 0 458 CALL MPI_RECV( buf, n, MPI_REAL, c lient_rank, tag, &459 m_to_c lient_comm(client_id), MPI_STATUS_IGNORE, ierr )460 461 END SUBROUTINE pmc_recv_from_c lient_real_r3461 CALL MPI_RECV( buf, n, MPI_REAL, child_rank, tag, & 462 m_to_child_comm(child_id), MPI_STATUS_IGNORE, ierr ) 463 464 END SUBROUTINE pmc_recv_from_child_real_r3 462 465 463 466 … … 520 523 521 524 522 SUBROUTINE pmc_inter_bcast_integer_1( buf, c lient_id, ierr )525 SUBROUTINE pmc_inter_bcast_integer_1( buf, child_id, ierr ) 523 526 524 527 IMPLICIT NONE 525 528 526 529 INTEGER, INTENT(INOUT),DIMENSION(:) :: buf !< 527 INTEGER, INTENT(IN),optional :: c lient_id!<530 INTEGER, INTENT(IN),optional :: child_id !< 528 531 INTEGER, INTENT(OUT),optional :: ierr !< 529 532 … … 533 536 534 537 ! 535 !-- PE 0 server broadcast to all clientPEs536 IF ( PRESENT( c lient_id ) ) THEN537 538 mycomm = m_to_c lient_comm(client_id)538 !-- PE 0 on parent broadcast to all child PEs 539 IF ( PRESENT( child_id ) ) THEN 540 541 mycomm = m_to_child_comm(child_id) 539 542 540 543 IF ( m_model_rank == 0 ) THEN … … 545 548 546 549 ELSE 547 mycomm = m_to_ server_comm550 mycomm = m_to_parent_comm 548 551 root_pe = 0 549 552 ENDIF -
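Every routine in the wrapper module above follows one pattern: a typed, fixed-rank MPI_SEND or MPI_RECV on a stored communicator, with generic interfaces (pmc_send_to_parent, pmc_recv_from_child, ...) selecting the specific routine from the buffer type and rank, so the calling code never touches MPI directly. A compilable sketch of the same pattern follows; all names are illustrative, and plain MPI_COMM_WORLD stands in for the m_to_parent_comm / m_to_child_comm inter-communicators (run with two MPI ranks).

MODULE mini_pmc_wrapper
!
!- Sketch only: generic send/recv wrappers around a stored communicator
   USE MPI
   IMPLICIT NONE
   INTEGER :: m_to_peer_comm   !< stands in for m_to_parent_comm / m_to_child_comm

   INTERFACE peer_send
      MODULE PROCEDURE peer_send_integer
      MODULE PROCEDURE peer_send_real_r1
   END INTERFACE peer_send

CONTAINS

   SUBROUTINE peer_send_integer( buf, n, peer_rank, tag, ierr )
      INTEGER, DIMENSION(:), INTENT(IN) :: buf
      INTEGER, INTENT(IN)  :: n, peer_rank, tag
      INTEGER, INTENT(OUT) :: ierr
      CALL MPI_SEND( buf, n, MPI_INTEGER, peer_rank, tag, m_to_peer_comm, ierr )
   END SUBROUTINE peer_send_integer

   SUBROUTINE peer_send_real_r1( buf, n, peer_rank, tag, ierr )
      DOUBLE PRECISION, DIMENSION(:), INTENT(IN) :: buf
      INTEGER, INTENT(IN)  :: n, peer_rank, tag
      INTEGER, INTENT(OUT) :: ierr
      CALL MPI_SEND( buf, n, MPI_DOUBLE_PRECISION, peer_rank, tag,             &
                     m_to_peer_comm, ierr )
   END SUBROUTINE peer_send_real_r1

   SUBROUTINE peer_recv_integer( buf, n, peer_rank, tag, ierr )
      INTEGER, DIMENSION(:), INTENT(OUT) :: buf
      INTEGER, INTENT(IN)  :: n, peer_rank, tag
      INTEGER, INTENT(OUT) :: ierr
      CALL MPI_RECV( buf, n, MPI_INTEGER, peer_rank, tag, m_to_peer_comm,      &
                     MPI_STATUS_IGNORE, ierr )
   END SUBROUTINE peer_recv_integer

END MODULE mini_pmc_wrapper


PROGRAM wrapper_demo
!
!- Rank 0 sends through the generic interface, rank 1 receives; in PALM the
!- stored communicator would be the parent-child inter-communicator instead.
   USE MPI
   USE mini_pmc_wrapper
   IMPLICIT NONE
   INTEGER :: myid, ierr
   INTEGER :: buf(1)

   CALL MPI_INIT( ierr )
   CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )
   m_to_peer_comm = MPI_COMM_WORLD

   IF ( myid == 0 )  THEN
      buf(1) = 42
      CALL peer_send( buf, 1, 1, 200, ierr )
   ELSEIF ( myid == 1 )  THEN
      CALL peer_recv_integer( buf, 1, 0, 200, ierr )
      PRINT*, 'received ', buf(1)
   ENDIF

   CALL MPI_FINALIZE( ierr )
END PROGRAM wrapper_demo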
palm/trunk/SOURCE/pmc_parent_mod.f90
r1927 r1933 1 MODULE pmc_ server2 3 !------------------------------------------------------------------------------- -!1 MODULE pmc_parent 2 3 !-------------------------------------------------------------------------------! 4 4 ! This file is part of PALM. 5 5 ! … … 16 16 ! 17 17 ! Copyright 1997-2016 Leibniz Universitaet Hannover 18 !------------------------------------------------------------------------------- -!18 !-------------------------------------------------------------------------------! 19 19 ! 20 20 ! Current revisions: 21 21 ! ------------------ 22 22 ! 23 ! 23 ! 24 24 ! Former revisions: 25 25 ! ----------------- 26 26 ! $Id$ 27 ! 28 ! 1901 2016-05-04 15:39:38Z raasch 29 ! Module renamed. Code clean up. The words server/client changed to parent/child. 27 30 ! 28 31 ! 1900 2016-05-04 15:27:53Z raasch … … 47 50 ! 48 51 ! 1786 2016-03-08 05:49:27Z raasch 49 ! change in c lient-server data transfer: server now gets data from client50 ! instead that c lient put's it to the server52 ! change in child-parent data transfer: parent now gets data from child 53 ! instead that child put's it to the parent 51 54 ! 52 55 ! 1779 2016-03-03 08:01:28Z raasch … … 68 71 ! ------------ 69 72 ! 70 ! Serverpart of Palm Model Coupler71 !------------------------------------------------------------------------------ !73 ! Parent part of Palm Model Coupler 74 !-------------------------------------------------------------------------------! 72 75 73 76 #if defined( __parallel ) … … 80 83 #endif 81 84 USE kinds 82 USE pmc_general, &83 ONLY: arraydef, c lientdef, da_namedef, da_namelen, pedef,&85 USE pmc_general, & 86 ONLY: arraydef, childdef, da_namedef, da_namelen, pedef, & 84 87 pmc_g_setname, pmc_max_array, pmc_max_models, pmc_sort 85 88 86 USE pmc_handle_communicator, &87 ONLY: m_model_comm,m_model_rank,m_model_npes, m_to_c lient_comm,&88 m_world_rank, pmc_ server_for_client89 90 USE pmc_mpi_wrapper, &89 USE pmc_handle_communicator, & 90 ONLY: m_model_comm,m_model_rank,m_model_npes, m_to_child_comm, & 91 m_world_rank, pmc_parent_for_child 92 93 USE pmc_mpi_wrapper, & 91 94 ONLY: pmc_alloc_mem, pmc_bcast, pmc_time 92 95 … … 96 99 SAVE 97 100 98 TYPE c lientindexdef101 TYPE childindexdef 99 102 INTEGER :: nrpoints !< 100 103 INTEGER, DIMENSION(:,:), ALLOCATABLE :: index_list_2d !< 101 END TYPE c lientindexdef102 103 TYPE(c lientdef), DIMENSION(pmc_max_models) :: clients!<104 TYPE(c lientindexdef), DIMENSION(pmc_max_models) :: indclients!<104 END TYPE childindexdef 105 106 TYPE(childdef), DIMENSION(pmc_max_models) :: children !< 107 TYPE(childindexdef), DIMENSION(pmc_max_models) :: indchildren !< 105 108 106 109 INTEGER :: next_array_in_list = 0 !< 107 110 108 111 109 PUBLIC pmc_ server_for_client110 111 112 INTERFACE pmc_ serverinit113 MODULE PROCEDURE pmc_ serverinit114 END INTERFACE pmc_ serverinit112 PUBLIC pmc_parent_for_child 113 114 115 INTERFACE pmc_parentinit 116 MODULE PROCEDURE pmc_parentinit 117 END INTERFACE pmc_parentinit 115 118 116 119 INTERFACE pmc_s_set_2d_index_list … … 147 150 END INTERFACE pmc_s_set_active_data_array 148 151 149 PUBLIC pmc_ serverinit, pmc_s_clear_next_array_list, pmc_s_fillbuffer,&150 pmc_s_getdata_from_buffer, pmc_s_getnextarray, &151 pmc_s_setind_and_allocmem, pmc_s_set_active_data_array, &152 PUBLIC pmc_parentinit, pmc_s_clear_next_array_list, pmc_s_fillbuffer, & 153 pmc_s_getdata_from_buffer, pmc_s_getnextarray, & 154 pmc_s_setind_and_allocmem, pmc_s_set_active_data_array, & 152 155 pmc_s_set_dataarray, pmc_s_set_2d_index_list 153 156 … … 155 158 156 159 157 
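In pmc_parentinit below, each parent-child inter-communicator is additionally merged into an intra-communicator with MPI_INTERCOMM_MERGE, because the later buffer transfer uses one-sided MPI (MPI_GET across an RMA window), which needs a communicator spanning both groups. A minimal sketch of that handshake between two single-rank "models" follows; the names are illustrative, exactly two MPI ranks are assumed, and this is not PALM code.

PROGRAM merge_sketch
!
!- Sketch only: build an inter-communicator between two one-rank groups,
!- query the remote group size, and merge into one intra-communicator.
   USE MPI
   IMPLICIT NONE
   INTEGER :: myid          !< rank in MPI_COMM_WORLD
   INTEGER :: local_comm    !< single-rank "model" communicator
   INTEGER :: inter_comm    !< inter-communicator to the other model
   INTEGER :: intra_comm    !< merged communicator, usable for RMA
   INTEGER :: intra_rank    !< rank within the merged communicator
   INTEGER :: remote_npes   !< size of the remote group
   INTEGER :: ierr          !<

   CALL MPI_INIT( ierr )
   CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )
!
!- Each rank forms its own group; the remote leader is the other world rank
   CALL MPI_COMM_SPLIT( MPI_COMM_WORLD, myid, 0, local_comm, ierr )
   CALL MPI_INTERCOMM_CREATE( local_comm, 0, MPI_COMM_WORLD, 1-myid, 99,       &
                              inter_comm, ierr )
   CALL MPI_COMM_REMOTE_SIZE( inter_comm, remote_npes, ierr )
!
!- .FALSE./.TRUE. (here: myid == 1) decides which group is ordered first,
!- analogous to the .FALSE. used on the parent side in pmc_parentinit
   CALL MPI_INTERCOMM_MERGE( inter_comm, myid == 1, intra_comm, ierr )
   CALL MPI_COMM_RANK( intra_comm, intra_rank, ierr )

   PRINT*, 'world rank ', myid, ' -> intra rank ', intra_rank,                 &
           ', remote group size ', remote_npes
   CALL MPI_FINALIZE( ierr )
END PROGRAM merge_sketch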
SUBROUTINE pmc_ parentinit 158 161 159 162 IMPLICIT NONE 160 163 161 164 INTEGER :: c lientid !< 164 INTEGER :: childid !< 162 165 INTEGER :: i !< 163 166 INTEGER :: j !< … … 165 168 166 169 167 DO i = 1, SIZE( pmc_server_for_client )-1 168 169 clientid = pmc_server_for_client( i ) 170 171 clients(clientid)%model_comm = m_model_comm 172 clients(clientid)%inter_comm = m_to_client_comm(clientid) 170 DO i = 1, SIZE( pmc_parent_for_child )-1 171 172 childid = pmc_parent_for_child( i ) 173 174 children(childid)%model_comm = m_model_comm 175 children(childid)%inter_comm = m_to_child_comm(childid) 176 173 177 ! 174 178 !-- Get rank and size 175 CALL MPI_COMM_RANK( clients(clientid)%model_comm, & 176 clients(clientid)%model_rank, istat ) 177 CALL MPI_COMM_SIZE( clients(clientid)%model_comm, & 178 clients(clientid)%model_npes, istat ) 179 CALL MPI_COMM_REMOTE_SIZE( clients(clientid)%inter_comm, & 180 clients(clientid)%inter_npes, istat ) 179 CALL MPI_COMM_RANK( children(childid)%model_comm, & 180 children(childid)%model_rank, istat ) 181 CALL MPI_COMM_SIZE( children(childid)%model_comm, & 182 children(childid)%model_npes, istat ) 183 CALL MPI_COMM_REMOTE_SIZE( children(childid)%inter_comm, & 184 children(childid)%inter_npes, istat ) 185 181 186 ! 182 187 !-- Intra communicator is used for MPI_GET 183 CALL MPI_INTERCOMM_MERGE( clients(clientid)%inter_comm, .FALSE., & 184 clients(clientid)%intra_comm, istat ) 185 CALL MPI_COMM_RANK( clients(clientid)%intra_comm, & 186 clients(clientid)%intra_rank, istat ) 187 188 ALLOCATE( clients(clientid)%pes(clients(clientid)%inter_npes)) 189 ! 190 !-- Allocate array of TYPE arraydef for all client PEs to store information 188 CALL MPI_INTERCOMM_MERGE( children(childid)%inter_comm, .FALSE., & 189 children(childid)%intra_comm, istat ) 190 CALL MPI_COMM_RANK( children(childid)%intra_comm, & 191 children(childid)%intra_rank, istat ) 192 193 ALLOCATE( children(childid)%pes(children(childid)%inter_npes)) 194 195 ! 196 !-- Allocate array of TYPE arraydef for all child PEs to store information 191 197 !-- of the transfer array 192 DO j = 1, c lients(clientid)%inter_npes 193 ALLOCATE( c lients(clientid)%pes(j)%array_list(pmc_max_array) ) 198 DO j = 1, children(childid)%inter_npes 199 ALLOCATE( children(childid)%pes(j)%array_list(pmc_max_array) ) 194 200 ENDDO 195 201 196 CALL get_da_names_from_c lient (clientid) 197 198 ENDDO 199 200 END SUBROUTINE pmc_ serverinit 202 CALL get_da_names_from_child (childid) 203 204 ENDDO 205 206 END SUBROUTINE pmc_parentinit 201 207 202 208 203 209 204 SUBROUTINE pmc_s_set_2d_index_list( c lientid, index_list ) 210 SUBROUTINE pmc_s_set_2d_index_list( childid, index_list ) 205 211 206 212 IMPLICIT NONE 207 213 208 INTEGER, INTENT(IN) :: c lientid !< 214 INTEGER, INTENT(IN) :: childid !< 209 215 INTEGER, DIMENSION(:,:), INTENT(INOUT) :: index_list !< … … 219 225 220 226 IF ( m_model_rank == 0 ) THEN 221 ! 222 !-- Sort to ascending server PE 227 228 ! 229 !-- Sort to ascending parent PE order 223 230 CALL pmc_sort( index_list, 6 ) 224 231 225 232 is = 1 226 233 DO ip = 0, m_model_npes-1 227 ! 228 !-- Split into server PEs 235 236 ! 237 !-- Split into parent PEs 229 238 ie = is - 1 239 230 240 ! 231 241 !-- There may be no entry for this PE … … 244 253 ian = 0 245 254 ENDIF 246 !
257 !-- Send data to other parent PEs 248 258 IF ( ip == 0 ) THEN 249 259 indc lients(clientid)%nrpoints = ian 259 indchildren(childid)%nrpoints = ian 250 260 IF ( ian > 0) THEN 251 ALLOCATE( indc lients(clientid)%index_list_2d(6,ian) ) 252 indc lients(clientid)%index_list_2d(:,1:ian) = & 261 ALLOCATE( indchildren(childid)%index_list_2d(6,ian) ) 262 indchildren(childid)%index_list_2d(:,1:ian) = & 253 263 index_list(:,is:ie) 254 264 ENDIF 255 265 ELSE 256 CALL MPI_SEND( ian, 1, MPI_INTEGER, ip, 1000, m_model_comm, & 266 CALL MPI_SEND( ian, 1, MPI_INTEGER, ip, 1000, m_model_comm, & 257 267 istat ) 258 268 IF ( ian > 0) THEN 259 CALL MPI_SEND( index_list(1,is), 6*ian, MPI_INTEGER, ip, & 269 CALL MPI_SEND( index_list(1,is), 6*ian, MPI_INTEGER, ip, & 260 270 1001, m_model_comm, istat ) 261 271 ENDIF … … 267 277 ELSE 268 278 269 CALL MPI_RECV( indc lients(clientid)%nrpoints, 1, MPI_INTEGER, 0, 1000, & 279 CALL MPI_RECV( indchildren(childid)%nrpoints, 1, MPI_INTEGER, 0, 1000, & 270 280 m_model_comm, MPI_STATUS_IGNORE, istat ) 271 ian = indc lients(clientid)%nrpoints 281 ian = indchildren(childid)%nrpoints 272 282 273 283 IF ( ian > 0 ) THEN 274 ALLOCATE( indc lients(clientid)%index_list_2d(6,ian) ) 275 CALL MPI_RECV( indc lients(clientid)%index_list_2d, 6*ian, & 276 MPI_INTEGER, 0, 1001, m_model_comm, & 284 ALLOCATE( indchildren(childid)%index_list_2d(6,ian) ) 285 CALL MPI_RECV( indchildren(childid)%index_list_2d, 6*ian, & 286 MPI_INTEGER, 0, 1001, m_model_comm, & 277 287 MPI_STATUS_IGNORE, istat) 278 288 ENDIF … … 280 290 ENDIF 281 291 282 CALL set_pe_index_list( c lientid, clients(clientid), & 283 indc lients(clientid)%index_list_2d, & 284 indc lients(clientid)%nrpoints ) 292 CALL set_pe_index_list( childid, children(childid), & 293 indchildren(childid)%index_list_2d, & 294 indchildren(childid)%nrpoints ) 285 295 286 296 END SUBROUTINE pmc_s_set_2d_index_list … … 298 308 299 309 300 LOGICAL FUNCTION pmc_s_getnextarray( clientid, myname ) 310 LOGICAL FUNCTION pmc_s_getnextarray( childid, myname ) 311 301 312 ! 302 313 !-- List handling is still required to get minimal interaction with 303 314 !-- pmc_handle_communicator 304 315 !-- TODO: what does "still" mean? Is there a chance to change this! 305 316 CHARACTER(LEN=*), INTENT(OUT) :: myname !< 306 INTEGER(iwp), INTENT(IN) :: c lientid !< 317 INTEGER(iwp), INTENT(IN) :: childid !< 307 318 308 319 TYPE(arraydef), POINTER :: ar … 310 321 311 321 next_array_in_list = next_array_in_list + 1 312 ! 313 !-- Array names are the same on all client PEs, so take first PE to get the name 314 ape => clients(clientid)%pes(1) 323 324 ! 325 !-- Array names are the same on all child PEs, so take first PE to get the name 326 ape => children(childid)%pes(1) 315 327 316 328 IF ( next_array_in_list > ape%nr_arrays ) THEN 317 330 ! 318 331 !-- All arrays are done … … 323 336 ar => ape%array_list(next_array_in_list) 324 337 myname = ar%name 338 325 339 !
326 340 !-- Return true if legal array … … 332 346 333 347 334 SUBROUTINE pmc_s_set_dataarray_2d( c lientid, array, array_2 )348 SUBROUTINE pmc_s_set_dataarray_2d( childid, array, array_2 ) 335 349 336 350 IMPLICIT NONE 337 351 338 INTEGER,INTENT(IN) :: c lientid!<352 INTEGER,INTENT(IN) :: childid !< 339 353 340 354 REAL(wp), INTENT(IN), DIMENSION(:,:), POINTER :: array !< … … 355 369 IF ( PRESENT( array_2 ) ) THEN 356 370 second_adr = C_LOC(array_2) 357 CALL pmc_s_setarray( c lientid, nrdims, dims, array_adr,&371 CALL pmc_s_setarray( childid, nrdims, dims, array_adr, & 358 372 second_adr = second_adr) 359 373 ELSE 360 CALL pmc_s_setarray( c lientid, nrdims, dims, array_adr )374 CALL pmc_s_setarray( childid, nrdims, dims, array_adr ) 361 375 ENDIF 362 376 … … 365 379 366 380 367 SUBROUTINE pmc_s_set_dataarray_3d( c lientid, array, nz_cl, nz, array_2 )381 SUBROUTINE pmc_s_set_dataarray_3d( childid, array, nz_cl, nz, array_2 ) 368 382 369 383 IMPLICIT NONE 370 384 371 INTEGER, INTENT(IN) :: c lientid!<385 INTEGER, INTENT(IN) :: childid !< 372 386 INTEGER, INTENT(IN) :: nz !< 373 387 INTEGER, INTENT(IN) :: nz_cl !< … … 381 395 TYPE(C_PTR) :: second_adr !< 382 396 397 ! 383 398 !-- TODO: the next assignment seems to be obsolete. Please check! 384 399 dims = 1 … … 397 412 IF ( PRESENT( array_2 ) ) THEN 398 413 second_adr = C_LOC( array_2 ) 399 CALL pmc_s_setarray( c lientid, nrdims, dims, array_adr,&414 CALL pmc_s_setarray( childid, nrdims, dims, array_adr, & 400 415 second_adr = second_adr) 401 416 ELSE 402 CALL pmc_s_setarray( c lientid, nrdims, dims, array_adr )417 CALL pmc_s_setarray( childid, nrdims, dims, array_adr ) 403 418 ENDIF 404 419 … … 407 422 408 423 409 SUBROUTINE pmc_s_setind_and_allocmem( c lientid )410 411 USE control_parameters, &424 SUBROUTINE pmc_s_setind_and_allocmem( childid ) 425 426 USE control_parameters, & 412 427 ONLY: message_string 413 428 … … 415 430 416 431 ! 417 !-- Naming convention for appendices: _ sc -> server to clienttransfer418 !-- _c s -> client to servertransfer419 !-- send -> server to clienttransfer420 !-- recv -> c lient to servertransfer421 INTEGER, INTENT(IN) :: c lientid!<432 !-- Naming convention for appendices: _pc -> parent to child transfer 433 !-- _cp -> child to parent transfer 434 !-- send -> parent to child transfer 435 !-- recv -> child to parent transfer 436 INTEGER, INTENT(IN) :: childid !< 422 437 423 438 INTEGER :: arlen !< … … 439 454 TYPE(arraydef), POINTER :: ar !< 440 455 441 REAL(wp),DIMENSION(:), POINTER, SAVE :: base_array_ sc !< base array for server to clienttransfer442 REAL(wp),DIMENSION(:), POINTER, SAVE :: base_array_c s !< base array for client to servertransfer443 444 ! 445 !-- Server to clientdirection456 REAL(wp),DIMENSION(:), POINTER, SAVE :: base_array_pc !< base array for parent to child transfer 457 REAL(wp),DIMENSION(:), POINTER, SAVE :: base_array_cp !< base array for child to parent transfer 458 459 ! 460 !-- Parent to child direction 446 461 myindex = 1 447 462 rcount = 0 … … 450 465 ! 451 466 !-- First stride: compute size and set index 452 DO i = 1, c lients(clientid)%inter_npes453 454 ape => c lients(clientid)%pes(i)467 DO i = 1, children(childid)%inter_npes 468 469 ape => children(childid)%pes(i) 455 470 tag = 200 456 471 … … 469 484 tag = tag + 1 470 485 rcount = rcount + 1 471 CALL MPI_ISEND( myindex, 1, MPI_INTEGER, i-1, tag, & 472 clients(clientid)%inter_comm, req(rcount), ierr ) 486 CALL MPI_ISEND( myindex, 1, MPI_INTEGER, i-1, tag, & 487 children(childid)%inter_comm, req(rcount), ierr ) 488 473 489 ! 
474 490 !-- Maximum of 1024 outstanding requests 475 !-- TODO: what does this limit mean s?491 !-- TODO: what does this limit mean? 476 492 IF ( rcount == 1024 ) THEN 477 493 CALL MPI_WAITALL( rcount, req, MPI_STATUSES_IGNORE, ierr ) … … 492 508 493 509 ! 494 !-- Create RMA (One Sided Communication) window for data buffer serverto495 !-- c lienttransfer.510 !-- Create RMA (One Sided Communication) window for data buffer parent to 511 !-- child transfer. 496 512 !-- The buffer of MPI_GET (counterpart of transfer) can be PE-local, i.e. 497 513 !-- it can but must not be part of the MPI RMA window. Only one RMA window is 498 514 !-- required to prepare the data for 499 !-- server -> client transfer on the serverside515 !-- parent -> child transfer on the parent side 500 516 !-- and for 501 !-- c lient -> server transfer on the clientside502 CALL pmc_alloc_mem( base_array_ sc, bufsize )503 c lients(clientid)%totalbuffersize = bufsize * wp517 !-- child -> parent transfer on the child side 518 CALL pmc_alloc_mem( base_array_pc, bufsize ) 519 children(childid)%totalbuffersize = bufsize * wp 504 520 505 521 winsize = bufsize * wp 506 CALL MPI_WIN_CREATE( base_array_sc, winsize, wp, MPI_INFO_NULL, & 507 clients(clientid)%intra_comm, & 508 clients(clientid)%win_server_client, ierr ) 522 CALL MPI_WIN_CREATE( base_array_pc, winsize, wp, MPI_INFO_NULL, & 523 children(childid)%intra_comm, & 524 children(childid)%win_parent_child, ierr ) 525 509 526 ! 510 527 !-- Open window to set data 511 CALL MPI_WIN_FENCE( 0, clients(clientid)%win_server_client, ierr ) 528 CALL MPI_WIN_FENCE( 0, children(childid)%win_parent_child, ierr ) 529 512 530 ! 513 531 !-- Second stride: set buffer pointer 514 DO i = 1, c lients(clientid)%inter_npes515 516 ape => c lients(clientid)%pes(i)532 DO i = 1, children(childid)%inter_npes 533 534 ape => children(childid)%pes(i) 517 535 518 536 DO j = 1, ape%nr_arrays 519 537 520 538 ar => ape%array_list(j) 521 ar%sendbuf = C_LOC( base_array_sc(ar%sendindex) ) 522 523 !-- TODO: replace this by standard PALM error message using the message routine 524 IF ( ar%sendindex + ar%sendsize > bufsize ) THEN 525 write(0,'(a,i4,4i7,1x,a)') 'Server Buffer too small ',i, & 526 ar%sendindex,ar%sendsize,ar%sendindex+ar%sendsize,bufsize,trim(ar%name) 527 CALL MPI_Abort (MPI_COMM_WORLD, istat, ierr) 539 ar%sendbuf = C_LOC( base_array_pc(ar%sendindex) ) 540 541 IF ( ar%sendindex + ar%sendsize > bufsize ) THEN 542 WRITE( message_string, '(a,i4,4i7,1x,a)' ) & 543 'Parent buffer too small ',i, & 544 ar%sendindex,ar%sendsize,ar%sendindex+ar%sendsize, & 545 bufsize,trim(ar%name) 546 CALL message( 'pmc_s_setind_and_allocmem', 'PA0429', 3, 2, 0, 6, 0 ) 528 547 ENDIF 529 548 ENDDO … … 531 550 532 551 ! 533 !-- C lient to serverdirection552 !-- Child to parent direction 534 553 bufsize = 8 554 535 555 ! 536 556 !-- First stride: compute size and set index 537 DO i = 1, c lients(clientid)%inter_npes538 539 ape => c lients(clientid)%pes(i)557 DO i = 1, children(childid)%inter_npes 558 559 ape => children(childid)%pes(i) 540 560 tag = 300 541 561 … … 543 563 544 564 ar => ape%array_list(j) 545 ! 546 !-- Receive index from client 565 566 ! 
567 !-- Receive index from child 547 568 tag = tag + 1 548 CALL MPI_RECV( myindex, 1, MPI_INTEGER, i-1, tag, &549 c lients(clientid)%inter_comm, MPI_STATUS_IGNORE, ierr )569 CALL MPI_RECV( myindex, 1, MPI_INTEGER, i-1, tag, & 570 children(childid)%inter_comm, MPI_STATUS_IGNORE, ierr ) 550 571 551 572 IF ( ar%nrdims == 3 ) THEN … … 564 585 !-- The buffer for MPI_GET can be PE local, i.e. it can but must not be part of 565 586 !-- the MPI RMA window 566 CALL pmc_alloc_mem( base_array_cs, bufsize, base_ptr ) 567 clients(clientid)%totalbuffersize = bufsize * wp 568 569 CALL MPI_BARRIER( clients(clientid)%intra_comm, ierr ) 587 CALL pmc_alloc_mem( base_array_cp, bufsize, base_ptr ) 588 children(childid)%totalbuffersize = bufsize * wp 589 590 CALL MPI_BARRIER( children(childid)%intra_comm, ierr ) 591 570 592 ! 571 593 !-- Second stride: set buffer pointer 572 DO i = 1, c lients(clientid)%inter_npes573 574 ape => c lients(clientid)%pes(i)594 DO i = 1, children(childid)%inter_npes 595 596 ape => children(childid)%pes(i) 575 597 576 598 DO j = 1, ape%nr_arrays … … 585 607 586 608 587 SUBROUTINE pmc_s_fillbuffer( c lientid, waittime )609 SUBROUTINE pmc_s_fillbuffer( childid, waittime ) 588 610 589 611 IMPLICIT NONE 590 612 591 INTEGER, INTENT(IN) :: c lientid!<613 INTEGER, INTENT(IN) :: childid !< 592 614 593 615 REAL(wp), INTENT(OUT), OPTIONAL :: waittime !< … … 612 634 613 635 ! 614 !-- Synchronization of the model is done in pmci_ client_synchronize and615 !-- pmci_server_synchronize.Therefor the RMA window can be filled without636 !-- Synchronization of the model is done in pmci_synchronize. 637 !-- Therefor the RMA window can be filled without 616 638 !-- sychronization at this point and a barrier is not necessary. 617 639 !-- Please note that waittime has to be set in pmc_s_fillbuffer AND … … 619 641 IF ( PRESENT( waittime) ) THEN 620 642 t1 = pmc_time() 621 CALL MPI_BARRIER( c lients(clientid)%intra_comm, ierr )643 CALL MPI_BARRIER( children(childid)%intra_comm, ierr ) 622 644 t2 = pmc_time() 623 645 waittime = t2- t1 624 646 ENDIF 625 647 626 DO ip = 1, c lients(clientid)%inter_npes627 628 ape => c lients(clientid)%pes(ip)648 DO ip = 1, children(childid)%inter_npes 649 650 ape => children(childid)%pes(ip) 629 651 630 652 DO j = 1, ape%nr_arrays … … 649 671 CALL C_F_POINTER( ar%data, data_3d, ar%a_dim(1:3) ) 650 672 DO ij = 1, ape%nrele 651 buf(myindex:myindex+ar%a_dim(4)-1) = &673 buf(myindex:myindex+ar%a_dim(4)-1) = & 652 674 data_3d(1:ar%a_dim(4),ape%locind(ij)%j,ape%locind(ij)%i) 653 675 myindex = myindex + ar%a_dim(4) … … 659 681 660 682 ENDDO 683 661 684 ! 
662 685 !-- Buffer is filled 663 CALL MPI_BARRIER( c lients(clientid)%intra_comm, ierr )686 CALL MPI_BARRIER( children(childid)%intra_comm, ierr ) 664 687 665 688 END SUBROUTINE pmc_s_fillbuffer … … 667 690 668 691 669 SUBROUTINE pmc_s_getdata_from_buffer( c lientid, waittime )692 SUBROUTINE pmc_s_getdata_from_buffer( childid, waittime ) 670 693 671 694 IMPLICIT NONE 672 695 673 INTEGER, INTENT(IN) :: c lientid!<674 REAL(wp), INTENT(OUT), OPTIONAL :: waittime !<675 676 INTEGER :: ierr !<677 INTEGER :: ij !<678 INTEGER :: ip !<679 INTEGER :: istat !<680 INTEGER :: j !<681 INTEGER :: myindex !<682 INTEGER :: nr !<683 INTEGER :: target_pe !<684 INTEGER(kind=MPI_ADDRESS_KIND) :: target_disp !<685 686 INTEGER, DIMENSION(1) :: buf_shape !<696 INTEGER, INTENT(IN) :: childid !< 697 REAL(wp), INTENT(OUT), OPTIONAL :: waittime !< 698 699 INTEGER :: ierr !< 700 INTEGER :: ij !< 701 INTEGER :: ip !< 702 INTEGER :: istat !< 703 INTEGER :: j !< 704 INTEGER :: myindex !< 705 INTEGER :: nr !< 706 INTEGER :: target_pe !< 707 INTEGER(kind=MPI_ADDRESS_KIND) :: target_disp !< 708 709 INTEGER, DIMENSION(1) :: buf_shape !< 687 710 688 711 REAL(wp) :: t1 !< … … 697 720 698 721 t1 = pmc_time() 699 ! 700 !-- Wait for client to fill buffer 701 CALL MPI_BARRIER( clients(clientid)%intra_comm, ierr ) 722 723 ! 724 !-- Wait for child to fill buffer 725 CALL MPI_BARRIER( children(childid)%intra_comm, ierr ) 702 726 t2 = pmc_time() - t1 703 727 IF ( PRESENT( waittime ) ) waittime = t2 728 704 729 ! 705 730 !-- TODO: check next statement 706 731 !-- Fence might do it, test later 707 !-- CALL MPI_WIN_FENCE( 0, c lients(clientid)%win_server_client, ierr)708 CALL MPI_BARRIER( c lients(clientid)%intra_comm, ierr )709 710 DO ip = 1, c lients(clientid)%inter_npes711 712 ape => c lients(clientid)%pes(ip)732 !-- CALL MPI_WIN_FENCE( 0, children(childid)%win_parent_child, ierr) 733 CALL MPI_BARRIER( children(childid)%intra_comm, ierr ) 734 735 DO ip = 1, children(childid)%inter_npes 736 737 ape => children(childid)%pes(ip) 713 738 714 739 DO j = 1, ape%nr_arrays … … 731 756 IF ( nr > 0 ) THEN 732 757 target_disp = ar%recvindex - 1 733 ! 734 !-- Client PEs are located behind server PEs 758 759 ! 760 !-- Child PEs are located behind parent PEs 735 761 target_pe = ip - 1 + m_model_npes 736 CALL MPI_WIN_LOCK( MPI_LOCK_SHARED, target_pe, 0, &737 c lients(clientid)%win_server_client, ierr )738 CALL MPI_GET( buf, nr, MPI_REAL, target_pe, target_disp, nr, &739 MPI_REAL, c lients(clientid)%win_server_client, ierr )740 CALL MPI_WIN_UNLOCK( target_pe, &741 c lients(clientid)%win_server_client, ierr )762 CALL MPI_WIN_LOCK( MPI_LOCK_SHARED, target_pe, 0, & 763 children(childid)%win_parent_child, ierr ) 764 CALL MPI_GET( buf, nr, MPI_REAL, target_pe, target_disp, nr, & 765 MPI_REAL, children(childid)%win_parent_child, ierr ) 766 CALL MPI_WIN_UNLOCK( target_pe, & 767 children(childid)%win_parent_child, ierr ) 742 768 ENDIF 743 769 … … 755 781 CALL C_F_POINTER( ar%data, data_3d, ar%a_dim(1:3)) 756 782 DO ij = 1, ape%nrele 757 data_3d(1:ar%a_dim(4),ape%locind(ij)%j,ape%locind(ij)%i) = &783 data_3d(1:ar%a_dim(4),ape%locind(ij)%j,ape%locind(ij)%i) = & 758 784 buf(myindex:myindex+ar%a_dim(4)-1) 759 785 myindex = myindex + ar%a_dim(4) … … 770 796 771 797 772 SUBROUTINE get_da_names_from_client( clientid ) 773 ! 774 !-- Get data array description and name from client 798 SUBROUTINE get_da_names_from_child( childid ) 799 800 ! 
801 !-- Get data array description and name from child 775 802 IMPLICIT NONE 776 803 777 INTEGER, INTENT(IN) :: c lientid !<804 INTEGER, INTENT(IN) :: childid !< 778 805 779 806 TYPE(da_namedef) :: myname !< 780 807 781 808 DO 782 CALL pmc_bcast( myname%couple_index, 0, comm=m_to_c lient_comm(clientid) )809 CALL pmc_bcast( myname%couple_index, 0, comm=m_to_child_comm(childid) ) 783 810 IF ( myname%couple_index == -1 ) EXIT 784 CALL pmc_bcast( myname% serverdesc, 0, comm=m_to_client_comm(clientid) )785 CALL pmc_bcast( myname%nameon server, 0, comm=m_to_client_comm(clientid) )786 CALL pmc_bcast( myname%c lientdesc, 0, comm=m_to_client_comm(clientid) )787 CALL pmc_bcast( myname%nameonc lient, 0, comm=m_to_client_comm(clientid) )788 789 CALL pmc_g_setname( c lients(clientid), myname%couple_index,&790 myname%nameon server)811 CALL pmc_bcast( myname%parentdesc, 0, comm=m_to_child_comm(childid) ) 812 CALL pmc_bcast( myname%nameonparent, 0, comm=m_to_child_comm(childid) ) 813 CALL pmc_bcast( myname%childdesc, 0, comm=m_to_child_comm(childid) ) 814 CALL pmc_bcast( myname%nameonchild, 0, comm=m_to_child_comm(childid) ) 815 816 CALL pmc_g_setname( children(childid), myname%couple_index, & 817 myname%nameonparent ) 791 818 ENDDO 792 819 793 END SUBROUTINE get_da_names_from_client 794 795 796 797 SUBROUTINE pmc_s_setarray(clientid, nrdims, dims, array_adr, second_adr ) 798 ! 799 !-- Set array for client interPE 0 820 END SUBROUTINE get_da_names_from_child 821 822 823 824 SUBROUTINE pmc_s_setarray(childid, nrdims, dims, array_adr, second_adr ) 825 826 ! 827 !-- Set array for child inter PE 0 800 828 IMPLICIT NONE 801 829 802 INTEGER, INTENT(IN) :: c lientid!<803 INTEGER, INTENT(IN) :: nrdims !<804 INTEGER, INTENT(IN), DIMENSION(:) :: dims !<830 INTEGER, INTENT(IN) :: childid !< 831 INTEGER, INTENT(IN) :: nrdims !< 832 INTEGER, INTENT(IN), DIMENSION(:) :: dims !< 805 833 806 834 TYPE(C_PTR), INTENT(IN) :: array_adr !< … … 813 841 814 842 815 DO i = 1, c lients(clientid)%inter_npes816 817 ape => c lients(clientid)%pes(i)843 DO i = 1, children(childid)%inter_npes 844 845 ape => children(childid)%pes(i) 818 846 ar => ape%array_list(next_array_in_list) 819 847 ar%nrdims = nrdims … … 835 863 836 864 837 SUBROUTINE pmc_s_set_active_data_array( c lientid, iactive )865 SUBROUTINE pmc_s_set_active_data_array( childid, iactive ) 838 866 839 867 IMPLICIT NONE 840 868 841 INTEGER, INTENT(IN) :: c lientid!<869 INTEGER, INTENT(IN) :: childid !< 842 870 INTEGER, INTENT(IN) :: iactive !< 843 871 … … 849 877 TYPE(arraydef), POINTER :: ar !< 850 878 851 DO ip = 1, c lients(clientid)%inter_npes852 853 ape => c lients(clientid)%pes(ip)879 DO ip = 1, children(childid)%inter_npes 880 881 ape => children(childid)%pes(ip) 854 882 855 883 DO j = 1, ape%nr_arrays … … 868 896 869 897 870 SUBROUTINE set_pe_index_list( c lientid, myclient, index_list, nrp )898 SUBROUTINE set_pe_index_list( childid, mychild, index_list, nrp ) 871 899 872 900 IMPLICIT NONE 873 901 874 INTEGER, INTENT(IN) :: c lientid!<902 INTEGER, INTENT(IN) :: childid !< 875 903 INTEGER, INTENT(IN), DIMENSION(:,:) :: index_list !< 876 904 INTEGER, INTENT(IN) :: nrp !< 877 905 878 TYPE(c lientdef), INTENT(INOUT) :: myclient!<906 TYPE(childdef), INTENT(INOUT) :: mychild !< 879 907 880 908 INTEGER :: i !< … … 888 916 INTEGER(KIND=MPI_ADDRESS_KIND) :: winsize !< 889 917 890 INTEGER, DIMENSION(myc lient%inter_npes):: remind !<918 INTEGER, DIMENSION(mychild%inter_npes) :: remind !< 891 919 892 920 INTEGER, DIMENSION(:), POINTER :: remindw !< … … 896 924 897 925 ! 
898 !-- First, count entries for every remote c lientPE899 DO i = 1, myc lient%inter_npes900 ape => myc lient%pes(i)926 !-- First, count entries for every remote child PE 927 DO i = 1, mychild%inter_npes 928 ape => mychild%pes(i) 901 929 ape%nrele = 0 902 930 ENDDO 931 903 932 ! 904 933 !-- Loop over number of coarse grid cells 905 934 DO j = 1, nrp 906 935 rempe = index_list(5,j) + 1 ! PE number on remote PE 907 ape => myc lient%pes(rempe)908 ape%nrele = ape%nrele + 1 ! Increment number of elements for this clientPE909 ENDDO 910 911 DO i = 1, myc lient%inter_npes912 ape => myc lient%pes(i)936 ape => mychild%pes(rempe) 937 ape%nrele = ape%nrele + 1 ! Increment number of elements for this child PE 938 ENDDO 939 940 DO i = 1, mychild%inter_npes 941 ape => mychild%pes(i) 913 942 ALLOCATE( ape%locind(ape%nrele) ) 914 943 ENDDO … … 921 950 DO j = 1, nrp 922 951 rempe = index_list(5,j) + 1 923 ape => myc lient%pes(rempe)952 ape => mychild%pes(rempe) 924 953 remind(rempe) = remind(rempe)+1 925 954 ind = remind(rempe) … … 927 956 ape%locind(ind)%j = index_list(2,j) 928 957 ENDDO 929 ! 930 !-- Prepare number of elements for client PEs 931 CALL pmc_alloc_mem( rldef, myclient%inter_npes*2 ) 932 ! 933 !-- Number of client PEs * size of INTEGER (i just arbitrary INTEGER) 934 winsize = myclient%inter_npes*c_sizeof(i)*2 935 936 CALL MPI_WIN_CREATE( rldef, winsize, iwp, MPI_INFO_NULL, & 937 myclient%intra_comm, indwin, ierr ) 958 959 ! 960 !-- Prepare number of elements for children PEs 961 CALL pmc_alloc_mem( rldef, mychild%inter_npes*2 ) 962 963 ! 964 !-- Number of child PEs * size of INTEGER (i just arbitrary INTEGER) 965 winsize = mychild%inter_npes*c_sizeof(i)*2 966 967 CALL MPI_WIN_CREATE( rldef, winsize, iwp, MPI_INFO_NULL, & 968 mychild%intra_comm, indwin, ierr ) 969 938 970 ! 939 971 !-- Open window to set data … … 942 974 rldef(1) = 0 ! index on remote PE 0 943 975 rldef(2) = remind(1) ! number of elements on remote PE 0 976 944 977 ! 945 978 !-- Reserve buffer for index array 946 DO i = 2, myc lient%inter_npes979 DO i = 2, mychild%inter_npes 947 980 i2 = (i-1) * 2 + 1 948 981 rldef(i2) = rldef(i2-2) + rldef(i2-1) * 2 ! index on remote PE 949 rldef(i2+1) = remind(i) ! number of elements on remote PE 950 ENDDO 951 ! 952 !-- Close window to allow client to access data 982 rldef(i2+1) = remind(i) ! number of elements on remote PE 983 ENDDO 984 985 ! 986 !-- Close window to allow child to access data 953 987 CALL MPI_WIN_FENCE( 0, indwin, ierr ) 954 ! 955 !-- Client has retrieved data 988 989 ! 990 !-- Child has retrieved data 956 991 CALL MPI_WIN_FENCE( 0, indwin, ierr ) 957 992 958 i2 = 2 * myc lient%inter_npes - 1993 i2 = 2 * mychild%inter_npes - 1 959 994 winsize = ( rldef(i2) + rldef(i2+1) ) * 2 995 960 996 ! 961 997 !-- Make sure, MPI_ALLOC_MEM works … … 965 1001 966 1002 CALL MPI_BARRIER( m_model_comm, ierr ) 967 CALL MPI_WIN_CREATE( remindw, winsize*c_sizeof(i), iwp, MPI_INFO_NULL, &968 myc lient%intra_comm, indwin2, ierr )1003 CALL MPI_WIN_CREATE( remindw, winsize*c_sizeof(i), iwp, MPI_INFO_NULL, & 1004 mychild%intra_comm, indwin2, ierr ) 969 1005 ! 970 1006 !-- Open window to set data 971 1007 CALL MPI_WIN_FENCE( 0, indwin2, ierr ) 1008 972 1009 ! 973 1010 !-- Create the 2D index list 974 1011 DO j = 1, nrp 975 1012 rempe = index_list(5,j) + 1 ! PE number on remote PE 976 ape => myc lient%pes(rempe)1013 ape => mychild%pes(rempe) 977 1014 i2 = rempe * 2 - 1 978 1015 ind = rldef(i2) + 1 … … 981 1018 rldef(i2) = rldef(i2)+2 982 1019 ENDDO 983 ! 984 !-- All data areset 1020 1021 ! 
1022 !-- All data are set 985 1023 CALL MPI_WIN_FENCE( 0, indwin2, ierr ) 1024 986 1025 ! 987 1026 !-- Don't know why, but this barrier is necessary before windows can be freed 988 1027 !-- TODO: find out why this is required 989 CALL MPI_BARRIER( myc lient%intra_comm, ierr ) 1028 CALL MPI_BARRIER( mychild%intra_comm, ierr ) 990 1029 991 1030 CALL MPI_WIN_FREE( indwin, ierr ) 992 1031 CALL MPI_WIN_FREE( indwin2, ierr ) 993 1032 1033 ! 994 1034 !-- TODO: check if the following idea needs to be done 995 1035 !-- Should work, problem with MPI implementation … … 1000 1040 1001 1041 #endif 1002 END MODULE pmc_ server 1042 END MODULE pmc_parent -
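The first stride of pmc_s_setind_and_allocmem above issues one MPI_ISEND per array and remote PE and drains the pending requests with MPI_WAITALL as soon as 1024 of them are outstanding, which keeps MPI-internal request queues bounded. The standalone sketch below isolates that throttling idiom; it is an illustration only, not part of the changeset, and all names in it (isend_throttle_sketch, max_pending) are invented. max_pending is kept tiny here only so the drain path is exercised even with few peers; run with at least two MPI processes.

   PROGRAM isend_throttle_sketch

      USE MPI

      IMPLICIT NONE

      INTEGER, PARAMETER :: max_pending = 4 !< stands in for the 1024 of the code

      INTEGER :: i       !<
      INTEGER :: ierr    !<
      INTEGER :: myindex !<
      INTEGER :: myrank  !<
      INTEGER :: npes    !<
      INTEGER :: rcount  !<
      INTEGER :: tag     !<

      INTEGER, DIMENSION(max_pending) :: req !<

      CALL MPI_INIT( ierr )
      CALL MPI_COMM_RANK( MPI_COMM_WORLD, myrank, ierr )
      CALL MPI_COMM_SIZE( MPI_COMM_WORLD, npes, ierr )

      IF ( myrank == 0 )  THEN
!
!--      Stream one message per peer; complete the batch whenever max_pending
!--      requests are outstanding
         rcount  = 0
         myindex = 1
         DO  i = 2, npes
            tag    = 200 + i
            rcount = rcount + 1
            CALL MPI_ISEND( myindex, 1, MPI_INTEGER, i-1, tag,               &
                            MPI_COMM_WORLD, req(rcount), ierr )
            IF ( rcount == max_pending )  THEN
               CALL MPI_WAITALL( rcount, req, MPI_STATUSES_IGNORE, ierr )
               rcount = 0
            ENDIF
         ENDDO
!
!--      Complete the remaining requests
         IF ( rcount > 0 )  THEN
            CALL MPI_WAITALL( rcount, req, MPI_STATUSES_IGNORE, ierr )
         ENDIF
      ELSE
         CALL MPI_RECV( myindex, 1, MPI_INTEGER, 0, 200+myrank+1,            &
                        MPI_COMM_WORLD, MPI_STATUS_IGNORE, ierr )
      ENDIF

      CALL MPI_FINALIZE( ierr )

   END PROGRAM isend_throttle_sketch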
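Both transfer directions above use the same one-sided pattern: a contiguous buffer (base_array_pc or base_array_cp) is exposed in an RMA window, barriers separate the fill phase from the read phase, and the reading side pulls the data with MPI_WIN_LOCK / MPI_GET / MPI_WIN_UNLOCK, as in pmc_s_getdata_from_buffer. The minimal two-rank sketch below isolates that passive-target access; base_array, nr_elements and the program name are invented for the illustration, and the real code additionally computes target_disp from ar%recvindex and addresses child PEs located behind the parent PEs.

   PROGRAM rma_get_sketch

      USE MPI

      IMPLICIT NONE

      INTEGER, PARAMETER :: nr_elements = 8 !< illustrative buffer length

      INTEGER :: ierr   !<
      INTEGER :: myrank !<
      INTEGER :: win    !<

      INTEGER(KIND=MPI_ADDRESS_KIND) :: target_disp !<
      INTEGER(KIND=MPI_ADDRESS_KIND) :: winsize     !<

      REAL(KIND=8), DIMENSION(nr_elements) :: base_array !< exposed buffer
      REAL(KIND=8), DIMENSION(nr_elements) :: buf        !< local buffer

      CALL MPI_INIT( ierr )
      CALL MPI_COMM_RANK( MPI_COMM_WORLD, myrank, ierr )

!
!--   Every rank exposes its buffer in the window; rank 0 holds the data
      base_array = REAL( myrank, 8 )
      winsize    = nr_elements * 8
      CALL MPI_WIN_CREATE( base_array, winsize, 8, MPI_INFO_NULL,            &
                           MPI_COMM_WORLD, win, ierr )

!
!--   A barrier separates filling the buffer from reading it, as between
!--   pmc_s_fillbuffer and the child-side retrieval
      CALL MPI_BARRIER( MPI_COMM_WORLD, ierr )

      IF ( myrank == 1 )  THEN
         target_disp = 0
         CALL MPI_WIN_LOCK( MPI_LOCK_SHARED, 0, 0, win, ierr )
         CALL MPI_GET( buf, nr_elements, MPI_REAL8, 0, target_disp,          &
                       nr_elements, MPI_REAL8, win, ierr )
         CALL MPI_WIN_UNLOCK( 0, win, ierr )
      ENDIF

!
!--   MPI_WIN_FREE is collective and completes all outstanding RMA
      CALL MPI_WIN_FREE( win, ierr )
      CALL MPI_FINALIZE( ierr )

   END PROGRAM rma_get_sketch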
palm/trunk/SOURCE/pres.f90
r1932 r1933 24 24 ! ----------------- 25 25 ! $Id$ 26 ! 27 ! 1932 2016-06-10 12:09:21Z suehring 28 ! Initial version of purely vertical nesting introduced. 26 29 ! 27 30 ! 1931 2016-06-10 12:06:59Z suehring … … 127 130 gathered_size, ibc_p_b, ibc_p_t, intermediate_timestep_count, & 128 131 intermediate_timestep_count_max, mg_switch_to_pe0_level, & 129 nest_domain, nest_bound_l, nest_bound_n, nest_bound_r, & 130 nest_bound_s, on_device, outflow_l, outflow_n, outflow_r, & 132 nest_domain, on_device, outflow_l, outflow_n, outflow_r, & 131 133 outflow_s, psolver, subdomain_size, topography, volume_flow, & 132 134 volume_flow_area, volume_flow_initial … … 147 149 148 150 USE pegrid 151 152 USE pmc_interface, & 153 ONLY: nesting_mode 149 154 150 155 USE poisfft_mod, & … … 174 179 REAL(wp), DIMENSION(1:nzt) :: w_l !< 175 180 REAL(wp), DIMENSION(1:nzt) :: w_l_l !< 181 182 LOGICAL :: nest_domain_nvn !< 176 183 177 184 … … 312 319 ! 313 320 !-- Remove mean vertical velocity in case that Neumann conditions are 314 !-- used both at bottom and top boundary, and if not a nested domain. 321 !-- used both at bottom and top boundary, and if not a nested domain in a 322 !-- normal nesting run. In case of vertical nesting, this must be done. 323 !-- Therefore an auxiliary logical variable nest_domain_nvn is used here, and 324 !-- nvn stands for non-vertical nesting. 315 325 !-- This cannot be done before the first initial time step because ngp_2dh_outer 316 326 !-- is not yet known then. 317 IF ( ibc_p_b == 1 .AND. ibc_p_t == 1 .AND. .NOT. nest_domain .AND. & 318 intermediate_timestep_count /= 0 ) & 327 nest_domain_nvn = nest_domain 328 IF ( nest_domain .AND. nesting_mode == 'vertical' ) THEN 329 nest_domain_nvn = .FALSE. 330 ENDIF 331 332 IF ( ibc_p_b == 1 .AND. ibc_p_t == 1 .AND. & 333 .NOT. nest_domain_nvn .AND. intermediate_timestep_count /= 0 ) & 319 334 THEN 320 335 w_l = 0.0_wp; w_l_l = 0.0_wp -
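For context on the new branch in pres.f90: with Neumann conditions for the perturbation pressure at both the bottom and the top boundary, the solution is defined only up to a profile, so the horizontal mean of w has to be removed level by level; the auxiliary flag nest_domain_nvn simply re-enables this for vertically nested child domains, whose lateral boundaries are cyclic. The serial sketch below shows the bare operation; array shapes and names are illustrative, and the real code weights the mean with ngp_2dh_outer and reduces it across PEs.

   PROGRAM remove_mean_w_sketch

      IMPLICIT NONE

      INTEGER, PARAMETER :: nx = 15 !< illustrative grid size
      INTEGER, PARAMETER :: ny = 15 !<
      INTEGER, PARAMETER :: nz = 32 !<

      INTEGER :: k !<

      REAL(KIND=8) :: w_mean !<

      REAL(KIND=8), DIMENSION(1:nz,0:ny,0:nx) :: w !<

!
!--   Arbitrary test field with a nonzero horizontal mean
      CALL RANDOM_NUMBER( w )

!
!--   Subtract the horizontal mean of w on every level so that no spurious
!--   mean vertical velocity can survive the pressure correction
      DO  k = 1, nz
         w_mean   = SUM( w(k,:,:) ) / REAL( ( nx + 1 ) * ( ny + 1 ), 8 )
         w(k,:,:) = w(k,:,:) - w_mean
      ENDDO

      PRINT*, 'remaining mean at k=1: ', SUM( w(1,:,:) ) /                   &
              REAL( ( nx + 1 ) * ( ny + 1 ), 8 )

   END PROGRAM remove_mean_w_sketch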
palm/trunk/SOURCE/time_integration.f90
r1928 r1933 25 25 ! $Id$ 26 26 ! 27 ! 1927 2016-06-07 11:56:53Z hellstea 28 ! Synchronization moved before CALL run_control. Exchange_horiz for pt after 29 ! CALL pmci_datatrans is now only called if ( .NOT. neutral ). 27 ! 1919 2016-05-27 14:51:23Z raasch 28 ! Initial version of purely vertical nesting introduced. 30 29 ! 31 30 ! 1918 2016-05-27 14:35:57Z raasch … … 298 297 299 298 USE pmc_interface, & 300 ONLY: client_to_server, nested_run, nesting_mode, & 301 pmci_datatrans, pmci_ensure_nest_mass_conservation, & 302 pmci_synchronize, server_to_client 299 ONLY: nested_run, nesting_mode, pmci_datatrans, & 300 pmci_ensure_nest_mass_conservation, pmci_synchronize 303 301 304 302 USE production_e_mod, & … … 726 724 CALL cpu_log( log_point(60), 'nesting', 'start' ) 727 725 ! 728 !-- Domain nesting. The data transfer subroutines pmci_ server_datatrans729 !-- and pmci_c lient_datatatrans are called inside the wrapper726 !-- Domain nesting. The data transfer subroutines pmci_parent_datatrans 727 !-- and pmci_child_datatrans are called inside the wrapper 730 728 !-- subroutine pmci_datatrans according to the control parameters 731 729 !-- nesting_mode and nesting_datatransfer_mode. … … 733 731 CALL pmci_datatrans( nesting_mode ) 734 732 735 IF ( nesting_mode == 'two-way' ) THEN 736 ! 737 !-- Exchange_horiz is needed for all server-domains after the 733 IF ( TRIM( nesting_mode ) == 'two-way' .OR. & 734 nesting_mode == 'vertical' ) THEN 735 ! 736 !-- Exchange_horiz is needed for all parent-domains after the 738 737 !-- anterpolation 739 738 CALL exchange_horiz( u, nbgp ) … … 750 749 ! 751 750 !-- Correct the w top-BC in nest domains to ensure mass conservation. 752 !-- This action must never be done for the root domain. 751 !-- This action must never be done for the root domain. Vertical 752 !-- nesting implies mass conservation. 753 753 IF ( nest_domain ) THEN 754 754 CALL pmci_ensure_nest_mass_conservation
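The time_integration change reduces the nesting logic in the time loop to a single call of the wrapper pmci_datatrans plus a parent-side ghost-layer update for the modes that anterpolate. The runnable stub below only mirrors that dispatch structure; pmci_parent_datatrans and pmci_child_datatrans are named in the diff comments, but the stub bodies and everything else here are invented for the illustration.

   PROGRAM datatrans_dispatch_sketch

      IMPLICIT NONE

      CHARACTER(LEN=8) :: nesting_mode = 'vertical' !< as set via namelist

      CALL pmci_datatrans( nesting_mode )

      IF ( TRIM( nesting_mode ) == 'two-way'  .OR.                           &
           nesting_mode == 'vertical' )  THEN
!
!--      Anterpolation has modified parent arrays, so the parent ghost
!--      layers must be refreshed (exchange_horiz in PALM)
         PRINT*, 'parent-side exchange_horiz required'
      ENDIF

   CONTAINS

      SUBROUTINE pmci_datatrans( mode )

         CHARACTER(LEN=*), INTENT(IN) :: mode

!
!--      Stub only: the real wrapper calls pmci_parent_datatrans and
!--      pmci_child_datatrans according to mode and nesting_datatransfer_mode
         PRINT*, 'data transfer for nesting_mode = ', TRIM( mode )

      END SUBROUTINE pmci_datatrans

   END PROGRAM datatrans_dispatch_sketch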